132 Commits

Author SHA1 Message Date
05e941710b Limpieza 2026-04-28 17:24:28 +02:00
01c55cba0f No se traquea el .env 2026-04-28 15:35:00 +02:00
10b2ae244c ignore 2026-04-28 15:33:08 +02:00
2dba2ebfae Query de sims por networkStatus y tiempo de suspension 2026-04-28 15:23:27 +02:00
d7eb4ad326 Gateway para francia funciona 2026-04-27 17:24:40 +02:00
d818441bde Docu 2026-04-27 16:30:22 +02:00
d063b47bec Merge branch 'main' into WEBINT-338_tiempo_suspension 2026-04-27 14:00:08 +02:00
6112de297b Bug duplicados solo en la primera linea 2026-04-27 13:46:34 +02:00
166c940295 Fix 2026-04-27 13:32:55 +02:00
246e4cb83b Comentarios 2026-04-27 13:03:47 +02:00
4517796ef3 Calculo del tiempo en suspension 2026-04-27 12:12:12 +02:00
e1450c6e97 POST -> PATCH 2026-04-22 15:22:37 +02:00
e40a19bbfb Bug de correlation_id undefined 2026-04-22 13:31:24 +02:00
fbdb64f3a1 Arreglo de bugs ordes 2026-04-22 12:59:23 +02:00
9a29f49669 Fix orders nos 2026-04-22 12:31:46 +02:00
c2081191ae Trazabilidad de nos, arreglo de orders 2026-04-21 17:39:09 +02:00
f0f3827fd0 Registro del estado/resultado de las operaciones de NOS 2026-04-21 15:51:16 +02:00
ee8f84bc57 Typescript 6 2026-04-21 13:33:01 +02:00
f95677d503 Error pageNOS 2026-04-21 12:50:54 +02:00
59b0b57ec2 Merge branch 'main' into WEBINT-334-migracion-nos 2026-04-21 10:47:08 +02:00
9174b0b6a4 Nombres de colas 2026-04-21 10:22:53 +02:00
e62c49ce91 Endpoints NOS 2026-04-21 10:11:21 +02:00
32990b4dcd Controladores y rutas 2026-04-17 15:49:53 +02:00
da2413002b Repositorio de nos completo 2026-04-17 14:06:41 +02:00
fdbb81ba64 Inicio port NOS 2026-04-16 17:46:32 +02:00
964ea6add9 Inicio migracion NOS 2026-04-16 12:44:31 +02:00
602878acf4 console log de debug 2026-04-16 12:01:31 +02:00
0aa52feaac Proxy 2026-04-16 11:51:30 +02:00
15b70309da Fix networkStatus 2026-04-15 15:11:13 +02:00
7001fccbf7 Problema de creacion de operacion de pausa/cancelacion 2026-04-15 13:50:20 +02:00
cffee785b2 logs 2026-04-15 12:48:08 +02:00
33d260310c Reactivate cuando la linea esté suspendida 2026-04-15 12:47:26 +02:00
e359acc1d5 Validaciones pausa instantaneas 2026-04-15 11:47:33 +02:00
bb4bce4a6d fix 2026-04-15 11:11:55 +02:00
eac74ef0cd Error key evento 2026-04-15 10:31:21 +02:00
1dc4eb5648 validador 2026-04-15 10:28:52 +02:00
a35a6c2b60 Error endpoint 2026-04-15 10:27:58 +02:00
1f78f4a3e1 Periodo pruebas reducidio 2026-04-15 10:18:49 +02:00
1e98559f3a Fix 2026-04-15 10:17:36 +02:00
ef0f860b9d Erro handler 2026-04-14 16:20:15 +02:00
0bff55379f tab 2026-04-14 16:15:01 +02:00
4d34308a13 Bug de logueo de operacion 2026-04-14 16:07:17 +02:00
70bf73b0a4 Error query 2026-04-10 11:11:12 +02:00
e3849d8217 Archivos de migraciones 2026-04-09 12:48:36 +02:00
d9854a12a8 Version de migrate 2026-04-09 12:31:06 +02:00
48d387a8da Migraciones antes de lanzar start 2026-04-09 12:10:40 +02:00
93d3e13793 Merge pull request 'WEBINT-328-Pausas-cacelaciones' (#1) from WEBINT-328-Pausas-cacelaciones into main
Reviewed-on: #1
2026-04-09 10:03:47 +00:00
031f5d5cf0 nombre de columna con mayus 2026-04-09 11:59:06 +02:00
047669bab2 Acabados de corregir bugs 2026-04-09 11:53:49 +02:00
5ea5939e3a Bug de finaliazacion de tareas erroneas 2026-04-09 09:08:11 +02:00
7ff3f13af4 Funcionan las suspensiones 2026-04-08 17:37:47 +02:00
a9589f578b Solucionado cierrre de pool para test 2026-04-08 14:47:57 +02:00
a27e4b30d2 Cron completo y mejora de logs 2026-04-08 13:48:57 +02:00
4168949b9e Endpoint preparados 2026-04-08 10:08:54 +02:00
e6ff54a15d Usecases 2026-04-07 17:43:17 +02:00
3956797020 Las operaciones basicas del repositorio de pause/cancel funcionan y
tienen test
2026-04-07 15:40:19 +02:00
7d88359263 Refactor de jwt y base de la bdd de pausas-cancelaciones 2026-04-07 13:20:31 +02:00
1b6da651a6 Ajustado el periodo de comprobaciones 2026-03-27 12:47:10 +01:00
9b305f887f Test .env ajustado 2026-03-27 12:24:20 +01:00
9506b9e28e Error de nombre de activacion 2026-03-27 10:59:15 +01:00
61c0edca07 Logs del envio 2026-03-27 10:52:03 +01:00
9470b5605d Pribando el env 2026-03-27 10:50:03 +01:00
9d63d23754 Mejor gestion de errores para los order 2026-03-26 12:21:28 +01:00
a95655a2a6 Completada la tarea de volcado 2026-03-26 09:29:09 +01:00
025801a689 Repositorio de lineas funciona 2026-03-25 11:51:14 +01:00
28880c4d99 Lineas activas e insertar cada una 2026-03-24 17:27:52 +01:00
5bb3bc554b doc 2026-03-24 11:20:59 +01:00
cfb907b840 Copy en jenkins 2026-03-11 12:34:41 +01:00
d5d7953fd2 Endpoint para documentacion 2026-03-11 12:31:17 +01:00
96298aab25 Docs en HTML 2026-03-11 11:35:16 +01:00
c17cca1e81 Sobreescribia el registerSobreescribia el register 2026-03-10 10:43:56 +01:00
7264efcf79 Errata 2026-03-10 10:42:32 +01:00
8934bcd603 Copia yarnrc 2026-03-10 10:39:39 +01:00
bdd08dbc56 Copiar yarnrc a docker 2026-03-10 10:37:26 +01:00
7d47fde806 Solucionado problema db-migrate 2026-03-10 10:21:53 +01:00
ad207fb732 db-migrate 2026-03-10 09:34:17 +01:00
bd9081b5bc hardcodeado el customerAccountCode 2026-03-06 11:18:30 +01:00
a429e9d14a Errata customer 2026-03-06 11:13:47 +01:00
81eb986313 Error de tipado 2026-03-06 11:06:15 +01:00
58bedc42f1 Bug de correlation_id en las llamadas a objenious 2026-03-06 11:02:18 +01:00
b97f422261 Prod 2026-03-05 10:33:38 +01:00
7a7dc33724 Error de prod 2026-03-05 10:30:43 +01:00
7743bd1f0d Migraciones a mano de momento 2026-03-05 10:17:26 +01:00
2897d7aa3c Probando a añadir el registro desde jenkins 2026-03-05 10:10:12 +01:00
0fd7eafcf3 Eliminado el clone 2026-03-05 09:09:02 +01:00
71253d216e Registry local 2026-03-05 09:04:50 +01:00
aeea6cfefd Probando con clone 2026-03-04 16:49:07 +01:00
e8eb925834 Sin paso 2026-03-04 15:54:13 +01:00
7cf9cc60e6 Test jenkins 2026-03-04 15:52:43 +01:00
1e9818d430 Yarn lock 2026-03-04 14:03:11 +01:00
39c0e87758 Mejora de las orders y actualizacion docs 2026-03-04 13:51:24 +01:00
5771972e2a Revesriendo cambio del docker 2026-03-02 17:19:07 +01:00
ea13403dc3 Error https 2026-03-02 17:16:34 +01:00
8d9a9b84b8 Cambiando el lock a mano 2026-03-02 17:15:04 +01:00
9b92f3506b Ya no hace falta la eliminacion explicita 2026-03-02 16:57:46 +01:00
1798118f6b Sin yarn.lock que copiar 2026-03-02 16:51:12 +01:00
eba2b8c569 Ya con la eliminacion del lock 2026-03-02 16:48:18 +01:00
b6b2cf6cc8 El inmutable 2026-03-02 16:46:29 +01:00
a0faa2d105 Jenkins 2026-03-02 16:45:35 +01:00
d323f804fc No copiar el lock 2026-03-02 16:41:37 +01:00
978454754c Eliminado yarn lock 2026-03-02 16:29:35 +01:00
b6091b15da docker con clean del cache 2026-03-02 16:23:05 +01:00
a6794a061b Yarn install 2026-03-02 16:01:00 +01:00
fafea3ce04 http 2026-03-02 15:55:32 +01:00
992f639f35 Prueba con otra url para gitea 2026-03-02 15:38:42 +01:00
f57309b06a Preparado despliegue 2026-03-02 15:07:30 +01:00
3be2b8f20d Nuevo nombre del container 2026-03-02 14:57:43 +01:00
4853fec7ff Fix de gestion de orders
Proceso de cancelacion verificado
2026-02-27 13:43:09 +01:00
04a6e50b7a Orders en todas las etapas 2026-02-27 11:16:45 +01:00
8ca3d095e6 Fix suspension && paso a plantilla de caso de uso 2026-02-26 17:47:32 +01:00
ca1144b55c Orders en los consumidores y gestion de los demas casos de uso 2026-02-26 17:30:32 +01:00
18422fbe38 Order para pause, activate y terminate 2026-02-25 17:42:16 +01:00
f221035c8b Visualizacion via api de las operaciones pendientes 2026-02-25 17:23:22 +01:00
02c80cd503 Orders con endpoints para monitorizacion 2026-02-25 12:20:52 +01:00
c416114c50 Arrglos por el cambio de nombre 2026-02-24 12:44:19 +01:00
e329b36933 Orders para test y flujo de migraciones mas simple 2026-02-24 11:27:47 +01:00
5c64c84e2a Todos los test de orders pasan 2026-02-23 13:35:36 +01:00
fc319372be Integracion completa de las migraciones 2026-02-23 12:04:21 +01:00
12dae135b5 Scripts de inicio con migraciones 2026-02-20 10:59:15 +01:00
b208c9c301 Preparando proceso de despliegue local para que se parezca al de
desarrollo, problema de las migraciones
2026-02-20 10:47:28 +01:00
1583ae539e Organizadas las migraciones para el despliegue 2026-02-19 17:24:47 +01:00
b6ec37c339 cambio de nombre por proposito 2026-02-17 17:24:13 +01:00
459523666f Mejora migraciones con tabla de versiones 2026-02-17 17:22:20 +01:00
8427613114 Intento de migraciones con script generador 2026-02-17 13:46:16 +01:00
5d3465fd97 Test para todo el repositorio de orders 2026-02-17 09:33:51 +01:00
39a2622cb1 base de datos de orders con repositorio y test 2026-02-16 17:31:20 +01:00
0a42e4776d Merge branch 'main' into seguimiento-tareas 2026-02-13 10:57:54 +01:00
44fea21a56 Fix de api-key y mejora del control de versiones 2026-02-13 10:55:19 +01:00
46ac54f7ab Seguimiento de ordenes desde la ingesta 2026-02-11 12:19:16 +01:00
2c9bf9dd93 Mejora del commit anterior 2026-02-10 17:28:32 +01:00
19b2958a9c Intento de mejorar el proceso de validacion de los controladores 2026-02-10 17:26:04 +01:00
a39b84e107 Validaciones para los endpints 2026-02-10 15:57:03 +01:00
157 changed files with 7270 additions and 3579 deletions

23
.env
View File

@@ -1,23 +0,0 @@
PORT=3000
RABBITMQ_USER=guest
RABBITMQ_PASSWORD=guest
ENVIORMENT=development
#RABBITMQ_HOST=rabbitmq-sim-broker
RABBITMQ_HOST=localhost
RABBITMQ_PORT=5672
RABBITMQ_USER=guest
RABBITMQ_PASSWORD=guest
RABBITMQ_SECURE=false
RABBITMQ_VHOST=sim-vhost
# Hay cosas que unificar de varios servicios
POSTGRES_DB=postgres
POSTGRES_DATABASE=postgres
#POSTGRES_HOST=postgresql-sim
POSTGRES_HOST=localhost
POSTGRES_PORT=5432
DEV_POSTGRES_PORT=5432
POSTGRES_USER=postgres
POSTGRES_PASSWORD=1234

2
.gitignore vendored
View File

@@ -20,3 +20,5 @@ node_modules
dist/*
.env

View File

@@ -3,3 +3,9 @@ compressionLevel: mixed
enableGlobalCache: false
nodeLinker: node-modules
npmScopes:
sf-alvar:
npmRegistryServer: "https://git.savefamilygps.net/api/packages/SaveFamily/npm/"
npmRegistryServer: "https://registry.npmjs.org/"

View File

@@ -12,13 +12,13 @@ La compañia a la que pertenece cada peticion y por tanto el servicio que lo va
## Decisiones pendientes
- [x] La capa worker según acción y la de operaciones de proveedores, se podrían unir en una sola con un enrutamiento por acción y compañía, pasando de tener claves `sim.[acción]` a `sim.[compañia].[acción]`. *Se ha aplicado el cambio ahora las routing keys tienen la estructura `sim.[compañia].[acción]`*
- [x] La estructura de RMQ se genera por medio del JSON, igual habría que definir cada cola en el worker que la consuma para poder añadir workers sin parar el RMQ. *Se ha aplicado el cambio, ahora solo se define en el json el broker principal para garantizar que exita sin servicios consumidores. Sin embargo tal como estan estructurdos los proyectos no es posible reiniciar solo un servicio*
- [x] La capa worker según acción y la de operaciones de proveedores, se podrían unir en una sola con un enrutamiento por acción y compañía, pasando de tener claves `sim.[acción]` a `sim.[compañia].[acción]`. _Se ha aplicado el cambio ahora las routing keys tienen la estructura `sim.[compañia].[acción]`_
- [x] La estructura de RMQ se genera por medio del JSON, igual habría que definir cada cola en el worker que la consuma para poder añadir workers sin parar el RMQ. _Se ha aplicado el cambio, ahora solo se define en el json el broker principal para garantizar que exita sin servicios consumidores. Sin embargo tal como estan estructurdos los proyectos no es posible reiniciar solo un servicio_
- [ ] Versionado de la API.
- [x] Método para sacar la compañía a partir del iccid, o buscar en la BDD si no es posible. *De momento es un objeto Map en el servicio de gateway*
- [ ] Cola de mensajes que no se han podido procesar. Distinguir según error de red; se reintenta; o error del propio mensaje; se envía a la cola de errores. v2 Se ha creado una cola de delay pero no se distingue el tipo de error, despues de n reintentos el mensaje va a la cola de dead-letter.
- [ ] Seguimiento de las peticiones de Objenious, por cada peticion hay qye hacer un seguimiento del request y de los mass action para saber si las activaciones han tenido exito. Habria que crear otra cola para consultar cada x tiempo o mejor un cron?
- [ ] Actualizar en la base de datos el estado de las peticiones de las sim y añadir el número de telefono cuando se activen o cuando se cumpla una accion.
- [x] Método para sacar la compañía a partir del iccid, o buscar en la BDD si no es posible. _De momento es un objeto Map en el servicio de gateway_
- [ ] Cola de mensajes que no se han podido procesar. Distinguir según error de red; se reintenta; o error del propio mensaje; se envía a la cola de errores. v2 Se ha creado una cola de delay pero no se distingue el tipo de error, después de n reintentos el mensaje va a la cola de dead-letter.
- [x] Seguimiento de las peticiones de Objenious, por cada peticion hay qye hacer un seguimiento del request y de los mass action para saber si las activaciones han tenido exito. Habria que crear otra cola para consultar cada x tiempo o mejor un cron?
- [x] Actualizar en la base de datos el estado de las peticiones de las sim y añadir el número de telefono cuando se activen o cuando se cumpla una accion.
## Versión con consumidores basados en la compañia
@@ -32,8 +32,14 @@ OBJENIOUS (33)2011a
## Diagrama de las colas de Rabbitmq
Actualmente la topologia de las colas consiste en un exchage principal que recibe todos los mensajes y los redistribuye en las colas de cada empresa y a la de logs. Para evitar reintentos de mensajes instantaneos, que podrian ser inutiles si algún servicio se ha caido, se ha añadido una cola de delay que alamcena los mesajes fallidos durante n segundos antes de ser reenviados al exchange principal. Si despues de n reintentos el mensaje sigue fallando se envia a la cola de dead-letter para ser procesado manualmente.
Actualmente la topología de las colas consiste en un exchage principal que recibe todos los mensajes y los redistribuye en las colas de cada empresa y a la de logs. Para evitar reintentos de mensajes instantáneos, que podrían ser inútiles si algún servicio se ha caído, se ha añadido una cola de delay que almacena los mensajes fallidos durante n segundos antes de ser reenviados al exchange principal. Si después de n reintentos el mensaje sigue fallando se envía a la cola de dead-letter para ser procesado manualmente.
![img](./imgs/diagrama-rabbit.png)
La decisión del numero de reintentos y la cola de dlx se hace en los servicios, con una configuracion global en shared.
La decisión del numero de reintentos y la cola de dlx se hace en los servicios, con una configuración global en shared.
## Puertos internos para comunicaciones entre sub-servicios
- **3000**: Gateway (sim-entrada-eventos)
- **3001**: Consumidor NOS (sim-consumidor-nos)
- **3002**: Consumidor Objenious (sim-consumidor-objenious)

View File

@@ -1,4 +1,3 @@
#/bin/bash
rm deployment/database/init.sql
cat deployment/database/*.sql >deployment/database/init.sql
docker compose -f deployment/local/docker/docker-compose.yaml --project-directory ./ build

View File

@@ -1,7 +1,8 @@
# stage base para coordinar las fases de build y ejecucion
FROM node:22-alpine AS base
WORKDIR /usr/local/app
COPY ./package.json ./yarn.lock ./
COPY ./package.json ./
#COPY ./package.json ./yarn.lock ./
RUN corepack enable && \
corepack prepare yarn@4.12.0 --activate
# copia el codigo en general

View File

@@ -4,16 +4,18 @@ CREATE TYPE status_enum AS ENUM ('noRequestID','noMassID','running','finished','
-- Tabla para gestionar las peticiones de cambio de objenious.
-- Para una o mas lineas se pueden lanzar operacione que no sabemos
-- con certeza cuando van a terminar.
-- Estas tablas está fuertemente ligadas al sistema que usa la plataforma
-- de objenioius y no debe unsarse para otra compañia.
CREATE TABLE if not exists objenious_operation (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
retry_count INT DEFAULT 0,
max_retry INT DEFAULT 5,
max_date_retry TIMESTAMP DEFAULT NULL,
retry_count INT DEFAULT 0, -- No implementado en codigo
max_retry INT DEFAULT 5, -- No implementado en codigo
max_date_retry TIMESTAMP DEFAULT NULL, -- No implementado en codigo
iccids TEXT,
request_id TEXT,
mass_action_id TEXT,
operation TEXT NOT NULL,
start_date TIMESTAMP NOT NULL DEFAULT now(),
start_date TIMESTAMP NOT NULL DEFAULT now(),
last_change_date TIMESTAMP NOT NULL DEFAULT now(),
end_date TIMESTAMP,
error TEXT,
@@ -24,7 +26,7 @@ CREATE TABLE if not exists objenious_operation (
-- operaciones pendientes para revisar
CREATE INDEX IF NOT EXISTS pending_operations
ON objenious_operation(start_date)
WHERE end_date IS NULL;
WHERE end_date IS NULL;
CREATE TABLE if not exists objenious_operation_change (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,

View File

@@ -0,0 +1,20 @@
CREATE table if not exists objenious_lines (
id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
simId BIGINT UNIQUE,
status TEXT,
iccid TEXT NOT NULL,
msisdn TEXT,
imei TEXT,
imeiChangeDate TIMESTAMPTZ,
offerCode TEXT,
preactivationDate TIMESTAMPTZ, -- No viene con hora
activationDate TIMESTAMPTZ,
commercialStatus TEXT,
commercialStatusDate TIMESTAMPTZ,
billingStatus TEXT,
billingStatusChangeDate TIMESTAMPTZ,
billingActivationDate TIMESTAMPTZ,
createDate TIMESTAMPTZ,
raw JSONB,
hash TEXT
)

View File

@@ -0,0 +1,106 @@
#!/bin/bash
# --- Para que siempre se ejecute en el mismo path
cd "$(dirname "$0")"
# --- Configuración por defecto ---
MIGRATIONS_DIR="./migrations"
OUTPUT_FILE_PREFIX="esquema_final"
DB_NAME="temp_schema_build_$(date +%s)"
# --- Función de Ayuda ---
usage() {
echo "Uso: $0 -v <version> [-e <ruta_env>]"
echo " -v Versión semántica objetivo (ej: 1.2.0)"
echo " -e (Opcional) Ruta al archivo .env para cargar variables"
echo " Los archivos de verions tienen que tener el formato x.x.x_descripcion.sql (Es importante la _ para serpar las partes) "
exit 1
}
# --- Procesar Argumentos (Flags) ---
# v: obligatorio
# e: opcionar
while getopts "v:e:" opt; do
case $opt in
v) TARGET_VERSION="$OPTARG" ;;
e) ENV_PATH="$OPTARG" ;;
*) usage ;;
esac
done
# Validar que la versión esté presente
if [ -z "$TARGET_VERSION" ]; then
echo "Error: La versión es obligatoria."
usage
fi
# --- Cargar variables de entorno ---
if [ ! -z "$ENV_PATH" ]; then
if [ -f "$ENV_PATH" ]; then
echo "~> Cargando configuración desde: $ENV_PATH"
# Exporta automáticamente las variables definidas en el archivo
set -o allexport
source "$ENV_PATH"
set +o allexport
else
echo "Error: No se encontró el archivo .env en: $ENV_PATH"
exit 1
fi
else
echo "!> No se especificó archivo .env, usando variables del sistema actual"
fi
# echo "Debug: Usuario es '$PGUSER'"
# echo "Debug: Host es '$PGHOST'"
# echo "Debug: Password es '$PGPASSWORD'" # Cuidado con mostrar esto
# --- Función de limpieza (Safety Net) ---
cleanup() {
echo "~> Limpiando: Eliminando base de datos temporal '$DB_NAME'"
# Usamos las variables de conexión cargadas (si las hay)
dropdb $DB_NAME --if-exists 2>/dev/null
}
trap cleanup EXIT
# --- Inicio del Proceso ---
echo "~> Iniciando build para versión: $TARGET_VERSION"
# 1. Crear BD temporal
# Nota: Si tu .env tiene PGHOST, la BD se creará allí. Si no, en localhost.
createdb $DB_NAME
# 2. Ejecutar script base (si existe)
rm -rf init.sql
cat base/*.sql >init.sql
if [ -f "init.sql" ]; then
echo "~> Ejecutando init.sql..."
psql -d $DB_NAME -f init.sql >/dev/null
fi
# 3. Iterar y filtrar migraciones
echo "~> Aplicando migraciones hasta la versión $TARGET_VERSION..."
for f in $(ls $MIGRATIONS_DIR/*.sql | sort -V); do
FILENAME=$(basename "$f")
# Extraer versión (Asume formato V1.0.0_desc.sql o 1.0.0_desc.sql)
FILE_VER=$(echo "$FILENAME" | sed -E 's/^V//' | awk -F_ '{print $1}')
# Comparación semántica
echo "comparando $TARGET_VERSION con $FILE_VER"
LOWEST=$(echo -e "$TARGET_VERSION\n$FILE_VER" | sort -V | head -n1)
if [ "$LOWEST" == "$FILE_VER" ] || [ "$FILE_VER" == "$TARGET_VERSION" ]; then
echo "~> Aplicando: $FILENAME ($FILE_VER)"
psql -d $DB_NAME -f "$f" >/dev/null
else
echo "~> Saltando: $FILENAME ($FILE_VER) - Mayor que objetivo"
fi
done
# 4. Generar nombre de archivo de salida
OUTPUT_FILE="${OUTPUT_FILE_PREFIX}_v${TARGET_VERSION}.sql"
# 5. Extraer el esquema FINAL
echo "~> Generando $OUTPUT_FILE ---"
pg_dump -d $DB_NAME -s --no-owner --no-privileges >$OUTPUT_FILE
echo "o> Esquema guardado en $OUTPUT_FILE"

View File

@@ -1,150 +0,0 @@
-- eliminar los drop para prod
drop domain if exists imei_type cascade;
CREATE DOMAIN imei_type as varchar(15);
drop domain if exists iccid_type cascade;
CREATE DOMAIN iccid_type as varchar(22);
drop domain if exists imsi_type cascade;
CREATE DOMAIN imsi_type as varchar(15);
CREATE table if not exists sim_cards (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
imei imei_type,
iccid iccid_type,
imsi imsi_type,
user_id BIGINT,
subscription_id BIGINT,
created_at TIMESTAMP,
last_update TIMESTAMP,
deleted_at TIMESTAMP
);
CREATE TABLE if not exists sim_envio (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
codigo_origen TEXT,
codigo_distrito TEXT,
pedido_id BIGINT,
sim_id BIGINT,
fecha_envio TIMESTAMP,
fecha_email TIMESTAMP,
is_preactivado BOOLEAN,
fecha_devolucion TIMESTAMP,
created_at TIMESTAMP,
CONSTRAINT fk_sim_id
FOREIGN KEY(sim_id) REFERENCES sim_cards(id)
);
-- Mock, No es parte de SIMs
CREATE TABLE if not exists sf_subscription (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY
);
-- No habria que meterle las propiedades del tipo de subscripcion
CREATE TABLE if not exists sim_subscription_types (
id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
subscription TEXT NOT NULL,
created_at TIMESTAMP,
updated_at TIMESTAMP,
deleted_at TIMESTAMP
);
CREATE TABLE if not exists sim_company (
id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
name TEXT,
created_at TIMESTAMP,
updated_at TIMESTAMP,
deleted_at TIMESTAMP
);
CREATE TABLE sim_subscription (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
company_id INT,
subscription_type_id INT,
sim_id BIGINT,
order_id BIGINT,
created_at TIMESTAMP,
updated_at TIMESTAMP,
deleted_at TIMESTAMP,
CONSTRAINT fk_sim_id
FOREIGN KEY(sim_id) REFERENCES sim_cards(id),
CONSTRAINT fk_company_id
FOREIGN KEY(company_id) REFERENCES sim_company(id),
CONSTRAINT fk_subscription_type_id
FOREIGN KEY(subscription_type_id) REFERENCES sim_subscription_types(id)
);
CREATE TABLE if not exists sim_subscription_operations (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
sim_id BIGINT,
operation_type TEXT NOT NULL,
happened_at TIMESTAMP,
CONSTRAINT valid_operations CHECK (
operation_type in ('free','preactivate','activate','pause','cancel')
),
CONSTRAINT fk_subscription_id
FOREIGN KEY(sim_id)
REFERENCES sim_subscription(id)
);
-- Se supone que indica un cambio
CREATE TABLE sim_subscription_historic (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
subscription_id BIGINT,
iccid iccid_type,
company_id INT
);
CREATE TYPE status_enum AS ENUM ('noRequestID','noMassID','running','finished','error','other');
-- Tabla para gestionar las peticiones de cambio de objenious.
-- Para una o mas lineas se pueden lanzar operacione que no sabemos
-- con certeza cuando van a terminar.
CREATE TABLE if not exists objenious_operation (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
retry_count INT DEFAULT 0,
max_retry INT DEFAULT 5,
max_date_retry TIMESTAMP DEFAULT NULL,
iccids TEXT,
request_id TEXT,
mass_action_id TEXT,
operation TEXT NOT NULL,
start_date TIMESTAMP NOT NULL DEFAULT now(),
last_change_date TIMESTAMP NOT NULL DEFAULT now(),
end_date TIMESTAMP,
error TEXT,
status status_enum,
objenious_status TEXT
);
-- operaciones pendientes para revisar
CREATE INDEX IF NOT EXISTS pending_operations
ON objenious_operation(start_date)
WHERE end_date IS NULL;
CREATE TABLE if not exists objenious_operation_change (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
operation_id BIGINT,
creation_date TIMESTAMP NOT NULL DEFAULT now(),
error TEXT,
new_status status_enum,
previous_status status_enum,
new_objenious_status TEXT,
previous_objenious_status TEXT,
new_request_id TEXT,
new_mass_action_id TEXT,
CONSTRAINT fk_operation_id
FOREIGN KEY(operation_id) REFERENCES objenious_operation(id)
);
CREATE INDEX operation_change
ON objenious_operation_change(operation_id);

View File

@@ -0,0 +1,48 @@
CREATE TYPE status_enum AS ENUM ('noRequestID','noMassID','running','finished','error','other');
-- Tabla para gestionar las peticiones de cambio de objenious.
-- Para una o mas lineas se pueden lanzar operacione que no sabemos
-- con certeza cuando van a terminar.
-- Estas tablas está fuertemente ligadas al sistema que usa la plataforma
-- de objenioius y no debe unsarse para otra compañia.
CREATE TABLE if not exists objenious_operation (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
retry_count INT DEFAULT 0, -- No implementado en codigo
max_retry INT DEFAULT 5, -- No implementado en codigo
max_date_retry TIMESTAMP DEFAULT NULL, -- No implementado en codigo
iccids TEXT,
request_id TEXT,
mass_action_id TEXT,
operation TEXT NOT NULL,
start_date TIMESTAMP NOT NULL DEFAULT now(),
last_change_date TIMESTAMP NOT NULL DEFAULT now(),
end_date TIMESTAMP,
error TEXT,
status status_enum,
objenious_status TEXT
);
-- operaciones pendientes para revisar
CREATE INDEX IF NOT EXISTS pending_operations
ON objenious_operation(start_date)
WHERE end_date IS NULL;
CREATE TABLE if not exists objenious_operation_change (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
operation_id BIGINT,
creation_date TIMESTAMP NOT NULL DEFAULT now(),
error TEXT,
new_status status_enum,
previous_status status_enum,
new_objenious_status TEXT,
previous_objenious_status TEXT,
new_request_id TEXT,
new_mass_action_id TEXT,
CONSTRAINT fk_operation_id
FOREIGN KEY(operation_id) REFERENCES objenious_operation(id)
);
CREATE INDEX operation_change
ON objenious_operation_change(operation_id);

View File

@@ -0,0 +1,67 @@
-- Tablas para el seguimiento de las operaciones de SIM sin importar
-- la cmpañia.
DO $$ BEGIN
CREATE TYPE order_types AS ENUM ('activate','preactivate','cancel','pause','reactivate','unknown');
CREATE TYPE order_status AS ENUM (
'pending', -- Mensaje creado/enviado a RabbitMQ
'running', -- Consumidor ha cogido el mensaje (opcional)
'finished', -- Procesado correctamente
'failed', -- Falló, pero podría reintentarse (Pasar a delay?)
'dlx' -- Falló definitivamente y está en Dead Letter Exchange
);
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
CREATE TABLE IF NOT EXISTS order_tracking (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
correlation_id VARCHAR(255) NOT NULL, -- ID compartido con RabbitMQ (message_id)
exchange VARCHAR(100), -- Exchange al que se envia (de momento solo hay 1 principal sin contar delay y dlx)
routing_key VARCHAR(100), -- Routing key del mensaje
order_type order_types NOT NULL DEFAULT 'unknown',
payload JSONB, -- Duda si es optimo guardar la copia, es útil en caso de fallo
-- Campos de reintentos?
status order_status NOT NULL DEFAULT 'pending',
retry_count INT DEFAULT 0,
error_message TEXT, -- Razón del fallo
error_stacktrace TEXT,
start_date TIMESTAMP NOT NULL DEFAULT (now() at time zone 'utc'),
update_date TIMESTAMP NOT NULL DEFAULT (now() at time zone 'utc'),
finish_date TIMESTAMP
);
-- Busqueda según id de rabbit
CREATE INDEX IF NOT EXISTS idx_order_correlation
ON order_tracking(correlation_id);
-- Ordenenes que todavia no han finalizado
CREATE INDEX IF NOT EXISTS pending_orders
ON order_tracking(start_date)
WHERE order_tracking.finish_date IS NULL;
CREATE TABLE IF NOT EXISTS order_history(
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
order_id BIGINT NOT NULL,
previous_status order_status NOT NULL, -- Siempre hay un estado anterior, para casos excepcioneale "unknown"
new_status order_status NOT NULL,
change_reason TEXT,
change_date TIMESTAMP NOT NULL DEFAULT (now() at time zone 'utc'),
CONSTRAINT fk_order_id
FOREIGN KEY(order_id)
REFERENCES order_tracking(id)
ON DELETE CASCADE
);
-- fk de order
CREATE INDEX IF NOT EXISTS idx_order_id
ON order_history(order_id);
-- busquedas por fecha
CREATE INDEX IF NOT EXISTS idx_order_change_date
ON order_history(change_date);

View File

@@ -0,0 +1,12 @@
/*
* Fechas modificadas para que todas sean en base a 'UTC'
* */
ALTER TABLE objenious_operation
ALTER COLUMN start_date SET DEFAULT (now() at time zone 'utc'),
ALTER COLUMN last_change_date SET DEFAULT (now() at time zone 'utc');
ALTER TABLE objenious_operation_change
ALTER COLUMN creation_date SET DEFAULT (now() at time zone 'utc');

View File

@@ -0,0 +1,30 @@
/*
* Fechas modificadas para que se puedan hacer query en base a la zona horaria objetivo
* SELECT col_date at time zone 'cet' -- devuleve la fecha en esa zona
* SELECT col_date -- devuleve la fecha en UTC con el offset de la zona horaria
*
* */
ALTER TABLE objenious_operation
ALTER COLUMN start_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
ALTER COLUMN start_date SET DEFAULT now(),
ALTER COLUMN last_change_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
ALTER COLUMN last_change_date SET DEFAULT now(),
ALTER COLUMN end_date SET DATA TYPE TIMESTAMP WITH TIME ZONE;
ALTER TABLE objenious_operation_change
ALTER COLUMN creation_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
ALTER COLUMN creation_date SET DEFAULT now();
ALTER TABLE order_tracking
ALTER COLUMN start_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
ALTER COLUMN start_date SET DEFAULT now(),
ALTER COLUMN update_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
ALTER COLUMN update_date SET DEFAULT now(),
ALTER COLUMN finish_date SET DATA TYPE TIMESTAMP WITH TIME ZONE;
ALTER TABLE order_history
ALTER COLUMN change_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
ALTER COLUMN change_date SET DEFAULT now();

View File

@@ -0,0 +1,10 @@
/**
* A que endpoint actualizar el estado de los order, si se especificase.
* Se asume que siempre se usa POST.
* Se separa host de enpoint para dejar host como default el origen de la
* peticion anterior y poder hacer filtrados
*/
ALTER TABLE order_tracking
ADD COLUMN webhook_host TEXT,
ADD COLUMN webhook_endpoint TEXT;

View File

@@ -0,0 +1,7 @@
/**
* En la tabla de orders de objenious no hay forma de saber a a que mensaje está Solicitando
* cada operación.
*/
ALTER TABLE objenious_operation
ADD COLUMN correlation_id TEXT;

View File

@@ -0,0 +1,32 @@
/**
* Para la tarea WEBINT-328-Pausas-cacelaciones.
* Almacena las pausas/cancelaciones que no se han podido hacer porque la linea esta en
* "Test"
*/
DO $$ BEGIN
CREATE TYPE SUSPENDTERMINATE AS ENUM ('suspend','terminate');
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
CREATE TABLE IF NOT EXISTS pause_cancel_tasks (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
iccid TEXT NOT NULL,
operation_type SUSPENDTERMINATE,
last_checked TIMESTAMPTZ, -- Última vez que se ha comprobado que no esté en test
activation_date TIMESTAMPTZ, -- Fecha de activacion para comprobar si ha pasdo un mes
next_check TIMESTAMPTZ, -- Si se ha comprobado se asignará la siguiente fecha de revision
completed_date TIMESTAMPTZ, -- Cuando se ha completado, para bien o mal.
error TEXT,
action_data JSONB -- datos de la operacion original.
);
-- Indice de las tareas que no han terminado
CREATE INDEX idx_pause_cancel_tasks_pending
ON pause_cancel_tasks (next_check)
WHERE completed_date IS NULL;

View File

@@ -1,17 +1,19 @@
# --- Release image ---
FROM node:22-alpine AS release
RUN apk --no-cache add git
WORKDIR /home/node/app
RUN corepack enable
COPY ./dist/packages ./packages
COPY ./.yarnrc.yml ./
COPY ./docs ./docs
# Para las migraciones
COPY ./deployment ./deployment
COPY ./package.json ./
# Force node-modules linker (no .yarnrc.yml in build context)
RUN echo 'nodeLinker: node-modules' > .yarnrc.yml
RUN yarn install
RUN yarn install
RUN mkdir -p dist && ln -sf ../packages dist/packages
@@ -19,4 +21,5 @@ COPY ./entrypoint.sh ./
RUN chmod +x entrypoint.sh
EXPOSE ${PORT:-3000}
ENTRYPOINT ["./entrypoint.sh"]

View File

@@ -1,4 +1,6 @@
#!/bin/sh
cd /home
cd /home/node/app && yarn start
cd /home/node/app
yarn migrate
yarn start

View File

@@ -22,7 +22,7 @@ pipeline {
}
stage("🧱 Building") {
steps {
sh 'rm -rf dist/'
sh 'rm -rf dist/'
sh 'yarn run build'
}
}
@@ -60,11 +60,15 @@ pipeline {
sourceFiles: "dist/**/*",
excludes: "dist/**/node_modules/**"
),
sshTransfer(
cleanRemote: false,
remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: "docs/**/*",
),
sshTransfer(
cleanRemote: false,
remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: "deployment/database/**/*",
removePrefix: "deployment",
),
sshTransfer(
cleanRemote: false,
@@ -88,6 +92,11 @@ pipeline {
remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: "package.json",
),
sshTransfer(
cleanRemote: false,
remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: ".yarnrc.yml",
),
sshTransfer(
cleanRemote: false,
execCommand: "sh $APP_REMOTE_PATH/rebuild.sh"

View File

@@ -0,0 +1,28 @@
# Base stage that coordinates the build and run phases
FROM node:22-alpine AS base
# git is required by the migrations tool; once it is published this will be
# replaced by the npm package
RUN apk --no-cache add git
WORKDIR /usr/local/app
RUN corepack enable && \
corepack prepare yarn@4.12.0 --activate
COPY ./package.json ./yarn.lock ./
COPY ./packages ./packages
# Copy the source code in general
COPY tsconfig*.json ./
COPY .env* ./
COPY ./.yarnrc.yml ./
COPY ./docs ./docs
COPY ./deployment/local/docker/start.sh ./
# NOTE(review): should the migrations directory be copied? Right now nothing
# appears to be launched from it -- confirm
COPY ./deployment/database/migrations ./deployment/database/migrations
RUN yarn install && \
yarn cache clean && \
yarn build && \
chmod +x start.sh
EXPOSE ${PORT}
ENTRYPOINT [ "./start.sh" ]

View File

@@ -24,14 +24,15 @@ services:
RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASSWORD}
volumes:
- ./rabbitmq_plugins/enabled_plugins:/etc/rabbitmq/enabled_plugins:ro
- ./deployment/rabbit/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf:ro
- ./deployment/rabbit/definitions.json:/etc/rabbitmq/definitions.json:ro
- ./deployment/local/rabbit/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf:ro
- ./deployment/local/rabbit/definitions.json:/etc/rabbitmq/definitions.json:ro
sim-gateway:
container_name: sim-gateway
sf-sims-api:
container_name: sf-sims-api
image: sf-sims-api
build:
context: ./
dockerfile: deployment/Dockerfile.dev
dockerfile: deployment/local/docker/Dockerfile.dev
args:
PORT: "${PORT:-3000}"
develop:
@@ -39,6 +40,9 @@ services:
- path: ./packages
action: sync
target: /usr/local/app/packages
- path: ./docs
action: sync
target: /usr/local/app/docs
- path: ./package.json
action: rebuild
ports:
@@ -46,19 +50,31 @@ services:
env_file:
- .env
restart: unless-stopped
healthcheck:
test:
[
"CMD-SHELL",
'node -e "fetch(''http://localhost:'' + (process.env.PORT || 3000) + ''/health'').then(r => { if (!r.ok) process.exit(1) }).catch(() => process.exit(1))"',
]
interval: 10s
timeout: 5s
retries: 5
start_period: 15s
depends_on:
rabbitmq-sim-broker:
condition: service_healthy
postgresql-sim:
condition: service_healthy
postgresql-sim:
container_name: postgresql-sim
image: postgres:16.1
env_file:
- .env
ports:
- "5432:${DEV_POSTGRES_PORT}"
- "${POSTGRES_PORT}:${POSTGRES_PORT}"
volumes:
- ./sql-data/:/var/lib/postgres/data
- ./deployment/database/init.sql:/docker-entrypoint-initdb.d/init.sql
healthcheck:
test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"]
interval: 5s

View File

@@ -1,9 +1,10 @@
#!/bin/bash
cd /mnt/docker-storage/containers/savefamily/sf-shopify-orders
# cd /mnt/docker-storage/containers/savefamily/sf-shopify-orders
cd /mnt/docker-storage/containers/savefamily/sf-sims-api
docker stop sf-shopify-orders-api || true
docker rm sf-shopify-orders-api || true
docker rmi sf-shopify-orders-api || true
docker stop sf-sims-api || true
docker rm sf-sims-api || true
docker rmi sf-sims-api || true
docker compose -f docker-compose.yaml up --build -d

View File

@@ -0,0 +1,3 @@
#!/bin/sh
# Container entrypoint: run the database migrations first and start the
# server only if they succeed (&& short-circuits on migration failure).
echo "Lanzando migraciones e iniciando servidor"
yarn migrate && yarn start

View File

@@ -1,6 +1,3 @@
default_user = guest
default_pass = guest
listeners.tcp.default = 5672
management.tcp.port = 15672

File diff suppressed because one or more lines are too long

View File

@@ -11,7 +11,7 @@ post {
}
body:form-urlencoded {
iccid: 8933201125065160406
iccid: 8935103196306448300
offer: SAVEFAMILY1
}

View File

@@ -0,0 +1,16 @@
meta {
name: Activation Email Health
type: http
seq: 8
}
post {
url: https://sf-sim-activation.savefamily.net/health
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,42 @@
meta {
name: Activation Email
type: http
seq: 6
}
post {
url: https://sf-sim-activation.savefamily.net/send-activation-mail
body: json
auth: inherit
}
headers {
x-apikey-sim-activation: 9e48c4ac-1ab0-4397-b3f3-6c239200dfe6
}
body:json {
{
"id": "11",
"retry_count": 0,
"max_retry": null,
"max_date_retry": null,
"iccids": [
"8933201125068886080"
],
"request_id": "14362",
"mass_action_id": "5208468",
"operation": "activate",
"start_date": "2026-02-13T11:08:42.499Z",
"last_change_date": "2026-02-16T09:24:36.073Z",
"end_date": "2026-02-16T09:24:36.073Z",
"error": null,
"status": "finished",
"objenious_status": "Terminé",
"msisdn": "33764399870"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -11,10 +11,45 @@ post {
}
body:form-urlencoded {
iccid: 8933201124059176320
iccid: 8933201125068887054
}
settings {
encodeUrl: true
timeout: 0
}
docs {
El endpoint recibe como body
```
{
iccid: string,
update_webhook?: string
}
```
`update_webhook` está en desarrollo, pero será donde se mande la actualizacion de la cancelación cuando haya una respuesta de la API externa.
Si la llamada tiene exito devuelve:
``` json
{
data: {
iccid: string,
message_id: string,
operation: "cancelation"
}
}
```
message_id se usará para la llamada /orders/message_id/{message_id}
Si la llamada falla devolvera:
```json
{
errors: {
msg: string
... (campos extra de gestion del error)
}
}
```
}

16
docs/sim-api/Docs.bru Normal file
View File

@@ -0,0 +1,16 @@
meta {
name: Docs
type: http
seq: 12
}
get {
url: {{baseurl}}/docs/sim-api-documentation.html
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,26 @@
meta {
name: France Suspended Lines
type: http
seq: 17
}
get {
url: {{baseurl}}/france/lines?status=SUSPENDED&limit=100
body: none
auth: inherit
}
params:query {
status: SUSPENDED
limit: 100
}
vars:pre-request {
iccid: 8933201125065160331
~baseurl: http://localhost:3002
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,21 @@
meta {
name: France Suspended Time
type: http
seq: 15
}
get {
url: {{baseurl}}/france/lines/{{iccid}}/suspended-time
body: none
auth: inherit
}
vars:pre-request {
iccid: 8933201125065160331
~baseurl: http://localhost:3002
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,16 @@
meta {
name: Get pending orders
type: http
seq: 11
}
get {
url: {{baseurl}}/orders/pending
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,16 @@
meta {
name: Order by id
type: http
seq: 9
}
get {
url: {{baseurl}}/orders/
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,20 @@
meta {
name: Orders by message_id
type: http
seq: 12
}
get {
url: {{baseurl}}/orders/message_id/019c93d3-014a-711d-b958-03dd629be78d
body: none
auth: inherit
}
params:query {
~message_id: 019c93d3-014a-711d-b958-03dd629be78d
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -15,7 +15,7 @@ params:query {
}
body:form-urlencoded {
iccid: 8933201125065160414
iccid: 8933201125065160331
}
settings {

View File

@@ -0,0 +1,21 @@
meta {
name: ReActivate
type: http
seq: 13
}
post {
url: {{baseurl}}/sim/reActivate
body: formUrlEncoded
auth: inherit
}
body:form-urlencoded {
iccid: 8935103196306448300
~offer: SAVEFAMILY1
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,21 @@
meta {
name: Test Order
type: http
seq: 9
}
post {
url: {{baseurl}}/sim/test
body: formUrlEncoded
auth: inherit
}
body:form-urlencoded {
iccid: 8933201125065160999
offer: SAVEFAMILY1
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,34 @@
docs {
Los endpoint tienen unos campos comunes de entrada:
```ts
{
iccid: string,
update_webhook?: string
}
```
`update_webhook` está en desarrollo, pero será donde se mande la actualizacion de la cancelación cuando haya una respuesta de la API externa.
Si la llamada tiene exito devuelve:
```ts
{
data: {
iccid: string,
message_id: string,
operation: string,
}
}
```
message_id se usará para la llamada /orders/message_id/{message_id}
Si la llamada falla devolvera:
```ts
{
errors: {
msg: string
... (campos extra de gestion del error)
}
}
```
}

View File

@@ -1,3 +1,4 @@
vars {
baseurl: http://localhost:3000
}
color: #2E8A54

View File

@@ -1,3 +1,4 @@
vars {
baseurl: https://sf-sims.savefamilygps.net
}
color: #CE4F3B

View File

@@ -0,0 +1,4 @@
vars {
baseurl: http://sim-connections.savefamilygps.net
}
color: #C77A0F

View File

@@ -0,0 +1,20 @@
meta {
name: test proxy
type: http
seq: 14
}
get {
url: {{baseurl}}/simconnections/alai/select?iccid=1111111111111111111
body: none
auth: inherit
}
params:query {
iccid: 1111111111111111111
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,23 @@
info:
name: Select Page
type: http
seq: 6
http:
method: GET
url: "{{baseurl}}/selectPage"
params:
- name: iccid
value: "8935103196306448300"
type: query
disabled: true
body:
type: json
data: ""
auth: inherit
settings:
encodeUrl: true
timeout: 0
followRedirects: true
maxRedirects: 5

25
docs/sim-nos/Select.yml Normal file
View File

@@ -0,0 +1,25 @@
info:
name: Select
type: http
seq: 5
http:
method: GET
url: "{{baseurl}}/select?iccid=8935103196306448300"
params:
- name: iccid
value: "8935103196306448300"
type: query
body:
type: json
data: |-
{
"iccid": "8933201125068890066"
}
auth: inherit
settings:
encodeUrl: true
timeout: 0
followRedirects: true
maxRedirects: 5

View File

@@ -0,0 +1,7 @@
name: local
color: "#2E8A54"
variables:
- name: baseurl
value: http://localhost:3001
- secret: true
name: token

View File

@@ -0,0 +1,7 @@
name: prod
color: "#CE4F3B"
variables:
- name: baseurl
value: https://nosconnectcenter-api.iot-x.com
- secret: true
name: token

View File

@@ -0,0 +1,10 @@
opencollection: 1.0.0
info:
name: sim-nos
bundled: false
extensions:
bruno:
ignore:
- node_modules
- .git

View File

@@ -0,0 +1,22 @@
info:
name: subscriber actions
type: http
seq: 1
http:
method: GET
url: "{{baseurl}}/subscribers/{{iccid}}/actions"
auth:
type: bearer
token: "{{token}}"
runtime:
variables:
- name: iccid
value: "8935103196306448300"
settings:
encodeUrl: true
timeout: 0
followRedirects: true
maxRedirects: 5

View File

@@ -0,0 +1,22 @@
info:
name: subscriber info
type: http
seq: 2
http:
method: GET
url: "{{baseurl}}/subscribers/{{iccid}}"
auth:
type: bearer
token: "{{token}}"
runtime:
variables:
- name: iccid
value: "8935103196306448300"
settings:
encodeUrl: true
timeout: 0
followRedirects: true
maxRedirects: 5

View File

@@ -0,0 +1,22 @@
info:
name: subscriber products available
type: http
seq: 4
http:
method: GET
url: "{{baseurl}}/subscribers/{{iccid}}/products/available"
auth:
type: bearer
token: "{{token}}"
runtime:
variables:
- name: iccid
value: "8935103196306448300"
settings:
encodeUrl: true
timeout: 0
followRedirects: true
maxRedirects: 5

View File

@@ -0,0 +1,22 @@
info:
name: subscribers
type: http
seq: 3
http:
method: GET
url: "{{baseurl}}/subscribers"
auth:
type: bearer
token: "{{token}}"
runtime:
variables:
- name: iccid
value: "8935103196306448300"
settings:
encodeUrl: true
timeout: 0
followRedirects: true
maxRedirects: 5

View File

@@ -0,0 +1,38 @@
meta {
name: Alarmas disponibles
type: http
seq: 20
}
get {
url: https://api-getway.objenious.com/ws/alarms
body: formUrlEncoded
auth: bearer
}
auth:bearer {
token: {{ws-access-token-partenaire}}
}
body:json {
{
"identifier": {
"identifiers": ["8933201124059175967"],
"identifierType": "ICCID"
}
}
}
body:form-urlencoded {
~identifier.identifierType: "ICCID"
~identifier.identifiers: ["8933201124059175967"]
}
vars:pre-request {
~id: 5187320
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -5,16 +5,16 @@ meta {
}
get {
url: https://api-getway.objenious.com/ws/lines?pageSize=10&identifier.identifierType=ICCID&identifier.identifiers=8933201125065160455
url: https://api-getway.objenious.com/ws/lines?pageSize=1000&simStatus=ACTIVATED
body: formUrlEncoded
auth: bearer
}
params:query {
pageSize: 10
identifier.identifierType: ICCID
identifier.identifiers: 8933201125065160455
~simStatus: ACTIVATED
pageSize: 1000
simStatus: ACTIVATED
~identifier.identifierType: ICCID
~identifier.identifiers: 8933201125065160455
}
auth:bearer {

View File

@@ -37,7 +37,7 @@ body:form-urlencoded {
}
vars:pre-request {
params.id: 14111
params.id: 15102
}
settings {

View File

@@ -5,13 +5,13 @@ meta {
}
get {
url: {{actionsUrl}}/massActions?massActionId=5192767
url: {{actionsUrl}}/massActions?massActionId=5363116
body: formUrlEncoded
auth: bearer
}
params:query {
massActionId: 5192767
massActionId: 5363116
~identifier.identifierType: ICCID
~identifier.identifiers: 8933201125065160463,8933201125065160422
}

1843
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,22 +1,25 @@
{
"name": "sim-eventos",
"version": "1.0.0",
"packageManager": "yarn@4.12.0",
"workspaces": [
"packages/*"
],
"scripts": {
"test": "vitest watch",
"build": "yarn workspaces foreach -A --exclude sim-consumidor-nos run build && cp .env dist/ && yarn setup:runtime",
"build": "rm -rf ./dist && yarn workspaces foreach -Api run build && cp .env dist/ && yarn setup:runtime",
"setup:runtime": "mkdir -p dist/packages/node_modules && ln -sf ../sim-shared dist/packages/node_modules/sim-shared && ln -sf ../sf-consumidor-objenious dist/packages/node_modules/sim-consumidor-objenious",
"start": "yarn setup:runtime && yarn workspaces foreach -Apiv --exclude sim-consumidor-nos run start",
"start": "yarn setup:runtime && yarn workspaces foreach -Apiv run start",
"typecheck": "npx tsc --noEmit",
"dev": "yarn workspaces foreach -Apiv --exclude sim-consumidor-nos run dev ",
"dev": "yarn workspaces foreach -Apiv run dev",
"lint": "eslint .",
"lint:fix": "eslint --fix .",
"format": "prettier --write .",
"format:check": "prettier --check ."
"format:check": "prettier --check .",
"migrate": "yarn db-migrate -e .env -m deployment/database/migrations -t 99.0.0"
},
"dependencies": {
"@sf-alvar/db-migrate": "1.0.6",
"@tsconfig/node22": "^22.0.5",
"amqp-connection-manager": "^5.0.0",
"amqplib": "^0.10.9",
@@ -25,7 +28,8 @@
"dotenv": "^17.2.3",
"express": "^5.2.1",
"pg": "^8.18.0",
"typescript": "^5.9.3",
"typescript": "^6.0.3",
"uuidv7": "^1.1.0",
"vite": "^7.3.1",
"vite-tsconfig-paths": "^6.0.5"
},

View File

@@ -1,5 +1,4 @@
export const env = {
ENVIRONMENT: process.env.ENVIORMENT,
POSTGRES_USER: process.env.POSTGRES_USER,
POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD,
POSTGRES_PORT: process.env.POSTGRES_PORT,

View File

@@ -1,3 +1,3 @@
console.log("Template")
console.log(new Date().toISOString())
export default {}

View File

@@ -1,22 +0,0 @@
PORT=3000
RABBITMQ_USER=guest
RABBITMQ_PASSWORD=guest
ENVIORMENT=development
RABBITMQ_HOST=rabbitmq-sim-broker
#RABBITMQ_HOST=localhost
RABBITMQ_PORT=5672
RABBITMQ_USER=guest
RABBITMQ_PASSWORD=guest
RABBITMQ_SECURE=false
RABBITMQ_VHOST=sim-vhost
# Hay cosas que unificar de varios servicios
POSTGRES_DB=postgres
POSTGRES_DATABASE=postres
POSTGRES_HOST=postgresql-sim-1
POSTGRES_PORT=5432
DEV_POSTGRES_PORT=5432
POSTGRES_USER=postgres
POSTGRES_PASSWORD=1234

View File

@@ -1,65 +1,178 @@
import { EventBus } from "sim-shared/domain/EventBus.port.js";
import { ConsumeMessage } from "amqplib";
import { Request, Response } from "express"
import { SimNosUsecases } from "./SimNOS.usecases.js";
import { EventBus } from "sim-shared/domain/EventBus.port.js";
import { Result } from "sim-shared/domain/Result.js";
import { SimEvents } from "sim-shared/domain/SimEvents.js";
import { iccidValidator } from "./httpValidators.js";
export class SimNosController {
private eventBus: EventBus;
private activationUseCases: any;
private routes = new Map<string, () => void>([
["activate", async () => { console.log("caso de uso activate") }],
["pause", async () => { console.log("caso de uso pause") }],
["cancel", async () => { console.log("caso de uso cancel") }],
])
constructor(
eventBus: EventBus
private uscases: SimNosUsecases,
private eventBus: EventBus,
) {
this.eventBus = eventBus
// No se si hay un sistema mejor
// convertor en const () => {} para conservar el contexto??
this.recibeMsg = this.recibeMsg.bind(this)
}
public async recibeMsg(msg: ConsumeMessage | null) {
if (!this.validateActivationMsg(msg)) {
throw new Error("Error consumiendo el mensaje no es valido")
}
/**
 * Validates and decodes an incoming AMQP message.
 * Returns `false` when the message is null, throws when the decoded content
 * is missing or has no `payload`, otherwise returns the decoded event.
 * NOTE(review): callers cast the result (`as SimEvents.x`) and read
 * `.payload` without handling the `false` case -- a null message would make
 * them throw a TypeError. Confirm whether that path can occur.
 */
private validateMsg(msg: ConsumeMessage | null) {
  if (msg == undefined) return false;
  const msgData = this.decodeMsg(msg) as SimEvents.general
  if (msgData == undefined || msgData.payload == undefined) throw new Error("Mensaje invalido")
  return msgData;
}
msg = msg!
const msgParsed = JSON.parse(String(msg.content))
const msgKey = msg.fields.routingKey.split(".")
const accion = msgKey[2]
if (accion == undefined) {
console.error("La routingKey es incorrecta: " + accion)
this.eventBus.nack(msg)
return;
}
if (this.routes.get(accion) == undefined) {
console.error("No hay una ruta definida para la accion")
this.eventBus.nack(msg)
return;
private decodeMsg(msg: ConsumeMessage): object | undefined {
if (msg.content == undefined) {
console.warn('[Sim.controller] Mensaje vacío');
return undefined;
}
try {
this.routes.get(accion)!()
} catch (err) {
console.log("Error procesando el mensaje")
this.eventBus.nack(msg)
} finally {
this.eventBus.ack(msg)
// Convertir el Buffer a String (UTF-8)
const contentJson = JSON.parse(Buffer.from(msg.content).toString('utf8'))
return contentJson;
} catch (error) {
console.error('Error al decodificar JSON:', error);
console.error(Buffer.from(msg.content).toString(("utf8")))
// Aquí podrías decidir devolver el string crudo o null
return undefined;
}
}
/**
 * Runs a use case for a consumed message and acks/nacks the message based on
 * the outcome: ack when the Result has no error, nack (and log) otherwise.
 * Exceptions are converted into a `Result` carrying the stringified error,
 * so callers always receive a Result instead of a thrown exception.
 * TODO:
 * - Log the reasons when validation fails
 * - Duplicated method: could be generalized into a sharedController class
 *   with the basic helper functions
 */
private async tryUseCase<T extends any>
(msg: ConsumeMessage, usecase: () => Promise<Result<string, T>>): Promise<Result<string, T>> {
  try {
    const result = await usecase()
    if (result.error == undefined) {
      await this.eventBus.ack(msg)
      return result
    } else {
      console.error("Error procesando el caso de uso (NOS)", result.error)
      this.eventBus.nack(msg)
      return result
    }
  } catch (e) {
    console.error("Error general procesando el caso de uso (NOS)")
    this.eventBus.nack(msg)
    return {
      error: String(e)
    }
  }
}
/**
 * Handler factory for `activate` events: validates the message, extracts the
 * iccid and correlation id, and delegates to the activate use case through
 * tryUseCase (which performs the ack/nack).
 */
public activate() {
  return async (msg: ConsumeMessage) => {
    console.log("[i] Evento activate ", msg.fields)
    // NOTE(review): validateMsg can return `false`; the cast hides that case
    const data = this.validateMsg(msg) as SimEvents.activation
    const iccid = data.payload.iccid
    // correlation_id arrives in the headers and links this operation to its order
    const correlation_id = data.headers?.message_id
    const res = await this.tryUseCase(msg, this.uscases.activate({
      iccid: iccid,
      correlation_id: correlation_id
    }))
    return res;
  }
}
/**
 * Handler factory for `suspend` (pause) events: validates the message and
 * delegates to the suspend use case through tryUseCase (which acks/nacks).
 */
public suspend() {
  return async (msg: ConsumeMessage) => {
    console.log("Evento suspend ", msg.fields)
    // NOTE(review): validateMsg can return `false`; the cast hides that case
    const data = this.validateMsg(msg) as SimEvents.suspend
    const iccid = data.payload.iccid
    // correlation_id arrives in the headers and links this operation to its order
    const correlation_id = data.headers?.message_id
    const res = await this.tryUseCase(msg, this.uscases.suspend({
      iccid: iccid,
      correlation_id: correlation_id
    }))
    return res;
  }
}
/**
 * Terminate is not supported for NOS: the event is only logged.
 * NOTE(review): the message is neither acked nor nacked here -- confirm
 * whether that is intentional (it may leave the message unacknowledged).
 */
public terminate() {
  return async (msg: ConsumeMessage) => {
    console.log("Evento termiante no soportado ", msg.fields)
  }
}
/**
 * Handler factory for `reactivate` events: validates the message and
 * delegates to the reactivate use case through tryUseCase (which acks/nacks).
 */
public reActivate() {
  return async (msg: ConsumeMessage) => {
    console.log("Evento reActivate ", msg.fields)
    // NOTE(review): validateMsg can return `false`; the cast hides that case
    const data = this.validateMsg(msg) as SimEvents.reActivation
    const iccid = data.payload.iccid
    // correlation_id arrives in the headers and links this operation to its order
    const correlation_id = data.headers?.message_id
    const res = await this.tryUseCase(msg, this.uscases.reactivate({
      iccid: iccid,
      correlation_id: correlation_id
    }))
    return res;
  }
}
/**
* Select especificamente por REST para evitar pasar por las colas.
* La respuesta es instantanea no se tiene que registrar como operación.
*/
private validateActivationMsg(msg: ConsumeMessage | null) {
if (msg == undefined) return false;
return true;
/**
 * Select exposed directly over REST to avoid going through the queues.
 * The response is immediate, so it is not registered as an operation.
 * Responds 400 on an invalid/missing iccid, 500 on repository errors,
 * and the line info on success.
 */
public selectREST() {
  return async (req: Request, res: Response) => {
    const { query } = req
    const body = { iccid: query.iccid as string }
    console.log("Evento select", body)
    const validateBody = iccidValidator.validate(body);
    if (validateBody.error != undefined) {
      // Bug fix: was 402 (Payment Required); a validation failure is a
      // 400 Bad Request
      res.status(400).json(validateBody)
      return;
    }
    const iccid: string | string[] = body.iccid
    if (Array.isArray(iccid)) {
      // TODO: Automate the pagination
      //const usecaseRes = this.uscases.selectMany({ iccid })
    } else {
      const usecaseRes = await this.uscases.selectOne({ iccid })
      if (usecaseRes.error != undefined) {
        res.status(500).json(usecaseRes)
        return;
      } else {
        res.send(usecaseRes.data)
        return;
      }
    }
    // Array input: no use case implemented yet, echo the validation result
    res.status(200).json(validateBody)
  }
}
/**
 * Paged select over REST. Reads optional `offset`, `limit`, `filter` and
 * `orderBy` query parameters, coerces them to number/string, and delegates
 * to the selectPage use case. Responds 500 with the Result on error and 200
 * with the page data on success.
 */
public selectPageREST() {
  return async (req: Request, res: Response) => {
    const { offset, limit, filter, orderBy } = req.query
    // Coerce only the parameters that were actually provided
    const params = {
      offset: (offset != undefined) ? Number(offset) : undefined,
      limit: (limit != undefined) ? Number(limit) : undefined,
      filter: (filter != undefined) ? String(filter) : undefined,
      orderBy: (orderBy != undefined) ? String(orderBy) : undefined
    }
    const usecaseRes = await this.uscases.selectPage(params)
    if (usecaseRes.error != undefined) {
      res.status(500).json(usecaseRes)
      return;
    } else {
      res.status(200).send(usecaseRes.data)
      return;
    }
  }
}
}

View File

@@ -0,0 +1,79 @@
/**
* Dirige cada mensaje dependiendo de el tipo de acción que contenga
* Podría hacerse con varias colas, pero así se controla mejor que
* las operaciones se hagan de 1 en 1.
*/
import { ConsumeMessage } from "amqplib";
import { SimNosController } from "./SimNOS.controller.js";
import { EventBus } from "sim-shared/domain/EventBus.port.js";
import { Result } from "sim-shared/domain/Result.js";
type FuncType = ((m: ConsumeMessage) => Promise<Result<string, any>>)
export class SimNosRouter {
  /** routingKey action -> message handler produced by the controller. */
  private readonly routes: Map<string, FuncType>;
  constructor(
    private readonly simController: SimNosController,
    private readonly eventBus: EventBus
  ) {
    this.routes = new Map<string, FuncType>([
      //["select", undefined],
      ["activate", this.simController.activate()],
      ["pause", this.simController.suspend()],
      ["reactivate", this.simController.reActivate()],
      //["cancel", this.simController.terminate()],
      //["preActivate", this.simController.preActivate()]
    ]);
  }
  /**
   * Routes the message to the matching action based on the routing key.
   * Unroutable messages are nacked here; for routed messages the ack/nack is
   * handled inside the controller, so this only nacks when the handler throws.
   */
  public route = async (msg: ConsumeMessage | null): Promise<void> => {
    if (!msg) {
      console.error("[Router] Mensaje vacío");
      return;
    }
    const action = this.extractAction(msg);
    if (!action) {
      console.error("[Router] La routing key no tiene una acción definida", msg.fields.routingKey);
      this.eventBus.nack(msg)
      return;
    }
    const handler = this.routes.get(action);
    if (!handler) {
      console.error(`[Router] La acción '${action}' no tiene un controlador asociado`);
      this.eventBus.nack(msg)
      return;
    }
    try {
      console.log("[Router] Ejecutando operación:", action);
      // Bug fix: handler(msg) returns a Promise, not a function, so the old
      // `typeof executeParams === "function"` guard never awaited it and
      // rejections escaped this try/catch (message never nacked).
      await handler(msg);
    } catch (error) {
      console.error(`[Router] Error al ejecutar la operación '${action}':`, error);
      this.eventBus.nack(msg)
    }
  };
  /** Assumes the action sits in third position: domain.company.action */
  private extractAction(msg: ConsumeMessage): string | undefined {
    return msg.fields.routingKey.split(".")[2];
  }
}

View File

@@ -0,0 +1,156 @@
/**
* Documentación de referencia:
* https://pelion-help.iot-x.com/nos/en-US/Content/API/APIReference/API%20Reference.htm?tocpath=_____7
*
* En nos el correlation_id ya va a ser obligatorio en todos los mensajes
*
* TODO:
* - Control de errores más preciso
*
*/
import { NosHttpClient } from "#infrastructure/NosHttpClient.js";
import { NosRepository } from "#infrastructure/NosRepository.js";
import { ErrorOrderDTO, FinishOrderDTO, UpdateOrderDTO } from "sim-shared/domain/Order.js";
import { Result } from "sim-shared/domain/Result.js";
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js";
export class SimNosUsecases {
  constructor(
    private httpClient: NosHttpClient,
    private nosRepository: NosRepository,
    private orderRepository: OrderRepository
  ) {
  }
  /**
   * Marks the order as running. In NOS the order update is keyed by the
   * correlation_id that arrives in the consumed message headers.
   */
  private async setRunning(correlation_id: string) {
    const updateData: UpdateOrderDTO = {
      new_status: "running",
      correlation_id: correlation_id
    }
    const order = await this.orderRepository.updateOrder(updateData)
    return order
  }
  /** Marks the order as finished (success), keyed by correlation_id. */
  private async setFinished(correlation_id: string) {
    const updateData: FinishOrderDTO = {
      correlation_id: correlation_id
    }
    const order = await this.orderRepository.finishOrder(updateData)
    return order
  }
  /** Marks the order as failed, storing the reason and optional stack trace. */
  private async setFailed(correlation_id: string, reason: string, detail?: string) {
    const updateData: ErrorOrderDTO = {
      status: "failed",
      correlation_id: correlation_id,
      reason: reason,
      error: reason,
      stackTrace: detail
    }
    console.log("SET FAILED DATA:", updateData)
    const order = await this.orderRepository.errorOrder(updateData)
    console.log("SET FAILED RES:", order)
    return order
  }
  /**
   * Wraps a repository call as a thunk that tracks the order lifecycle:
   * pending -> running -> finished/failed. Order updates are deliberately
   * fire-and-forget, but every one now carries a .catch so a repository
   * failure cannot surface as an unhandled promise rejection.
   */
  public usecaseTemplate<T, R>(
    func: (_: T) => Promise<Result<string, R>>,
    args: T,
    correlation_id?: string | undefined
  ) {
    return async () => {
      // Order pending -> running
      if (correlation_id != undefined)
        this.setRunning(correlation_id)
          .then()
          .catch(e => console.error("Error actualizando el order", e))
      try {
        const res = await func(args)
        if (res.error != undefined) {
          console.log("Error peticion: ", res, correlation_id)
          if (correlation_id != undefined)
            this.setFailed(correlation_id, res.error)
              .then(e => console.log("failed", e))
              .catch(e => console.error(e))
          return res;
        } else {
          if (correlation_id != undefined)
            // Bug fix: bare .then() left rejections unhandled
            this.setFinished(correlation_id)
              .catch(e => console.error("Error finalizando el order", e))
          return res;
        }
      } catch (e) {
        if (correlation_id != undefined)
          // Bug fix: bare .then() left rejections unhandled
          this.setFailed(correlation_id, "Error general de operacion de SIM (NOS) ", String(e))
            .catch(err => console.error("Error registrando el fallo del order", err))
        return {
          error: "Error general de operacion de SIM (NOS) " + String(e)
        }
      }
    }
  }
  /** Activation use case: thunk that activates the SIM and tracks the order. */
  public activate(args: {
    iccid: string,
    correlation_id?: string
  }) {
    return this.usecaseTemplate(
      (args) => this.nosRepository.activateSim(args), args.iccid, args.correlation_id)
  }
  /** Suspend use case: thunk that bars the SIM and tracks the order. */
  public suspend(args: {
    iccid: string,
    correlation_id?: string
  }) {
    return this.usecaseTemplate(
      (args) => this.nosRepository.bar(args), args.iccid, args.correlation_id)
  }
  /** Reactivate use case: thunk that unbars the SIM and tracks the order. */
  public reactivate(args: {
    iccid: string,
    correlation_id?: string
  }) {
    return this.usecaseTemplate(
      (args) => this.nosRepository.unbar(args), args.iccid, args.correlation_id)
  }
  /** Termination is not available in NOS; always throws. */
  public terminate(args: { iccid: string }) {
    throw new Error("No hay termination para NOS")
  }
  /* Important: read operations leave no record in orders */
  public async selectOne(args: {
    iccid: string
  }) {
    const res = await this.nosRepository.getLineInfo(args.iccid)
    return res
  }
  /** Paged read of lines; also leaves no record in orders. */
  public async selectPage(args: {
    offset?: number,
    limit?: number,
    filter?: string,
    orderBy?: string
  }) {
    const res = await this.nosRepository.getLinePage(args)
    return res
  }
  /**
  public selectMany(args: {
    iccid: string[]
  }) {
    return {}
  }
  */
}

View File

@@ -0,0 +1,39 @@
import { BodyValidator, Validator } from "sim-shared/aplication/BodyValidator.js";
// Validator: iccid must be present (neither null nor undefined).
const iccidNotNull = <Validator<{ iccid: unknown }>>{
  field: "iccid",
  errorMsg: "El iccid no está definido",
  validationFunc: (a: { iccid: unknown }) => {
    return (a.iccid != null && a.iccid != undefined)
  }
}
// Validator: iccid must be a single string or an array of strings.
const iccidValueOrArray = <Validator<{ iccid: unknown }>>{
  field: "iccid",
  errorMsg: "El iccid debe de ser un único valor o una lista",
  validationFunc: (a: { iccid: unknown }) => {
    return (typeof a.iccid == "string" || Array.isArray(a.iccid))
  }
}
// Validator: every iccid must be exactly 19 characters long.
const iccidLongitudValidator = <Validator<{ iccid: string | string[] }>>{
  field: "iccid",
  errorMsg: "La longitud del iccid/s es incorrecta debera ser de 19 caracteres",
  validationFunc: (a: { iccid: string | string[] }) => {
    if (Array.isArray(a.iccid)) {
      // Bug fix: the array branch previously fell through without a return,
      // yielding undefined (falsy), so arrays of valid iccids were rejected.
      return (a.iccid as string[]).every(e => e.length == 19)
    } else {
      return (a.iccid as string).length == 19
    }
  },
}
/** Composite validator for request bodies carrying an `iccid` field. */
export const iccidValidator = new BodyValidator<{ iccid: string | string[] }>(
  [
    iccidNotNull,
    iccidValueOrArray,
    iccidLongitudValidator,
  ]
)

View File

@@ -1,8 +1,18 @@
import { loadEnvFile } from "node:process";
loadEnvFile("../../.env")
import path from "node:path";
try {
loadEnvFile(path.join("./.env")) // base
} catch (e) {
console.error("Error cargando el .env desde ./.env")
}
try {
loadEnvFile(path.join("../../.env")) // Global
} catch (e) {
console.error("Error cargando el .env desde ../../.env")
}
export const env = {
ENVIRONMENT: process.env.ENVIORMENT,
POSTGRES_USER: process.env.POSTGRES_USER,
POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD,
POSTGRES_PORT: process.env.POSTGRES_PORT,
@@ -18,5 +28,12 @@ export const env = {
RABBITMQ_SECURE: process.env.RABBITMQ_SECURE,
RABBITMQ_RETRY_INTERVAL: process.env.RABBITMQ_INTERVAL,
RABBITMQ_VHOST: String(process.env.RABBITMQ_VHOST),
APP_PORT: Number(process.env.APP_PORT),
APP_HOST: String(process.env.APP_HOST),
// ESPECIFICO NOS
NOS_BASE_URL: String(process.env.NOS_BASE_URL),
NOS_ACCESS_TOKEN: String(process.env.NOS_ACCESS_TOKEN)
};

View File

@@ -0,0 +1,72 @@
import { RabbitMQEventBus, RMQConnectionParams } from "sim-shared/infrastructure/RabbitMQEventBus.js"
import { Channel } from "amqp-connection-manager"
import { env } from "./env/env.js"
// RabbitMQ connection parameters, read from the environment.
const rmqUser = env.RABBITMQ_USER
const rmqPass = env.RABBITMQ_PASSWORD
const rmqHost = env.RABBITMQ_HOST
const rmqPort = Number(env.RABBITMQ_PORT)
// NOTE(review): hard-coded to false; env.RABBITMQ_SECURE is ignored here -- confirm
const rmqSecure = false
const rmqVhost = env.RABBITMQ_VHOST
export const rmqConnOptions = <RMQConnectionParams>{
  username: rmqUser,
  password: rmqPass,
  vhost: rmqVhost,
  hostname: rmqHost,
  port: rmqPort,
  secure: rmqSecure,
}
// Queue names for the NOS consumer: main, dead-letter and delayed-retry.
const QUEUES = {
  NOS: "sim.nos",
  NOSDLX: "sim.nos.dlx",
  NOSDEL: "sim.nos.delayed",
}
// Exchange names: main topic exchange plus NOS-specific DLX and delay exchanges.
const EXCHANGES = {
  MAIN: "sim.exchange",
  DLX: "sim.ex.nos.dlx",
  DEL: "sim.ex.nos.delayed"
}
// Event bus singleton; buildQueues is hoisted, so referencing it here is safe.
export const rabbitmqEventBus = new RabbitMQEventBus({
  connectionParams: rmqConnOptions,
  buildStructure: buildQueues,
  maxRetry: 2,
  delayedExchange: EXCHANGES.DEL,
  dlxExchange: EXCHANGES.DLX
})
/**
 * Declares the NOS exchanges/queues and their bindings. The delayed queue
 * holds messages for DELAY ms (x-message-ttl) and dead-letters them back to
 * the main exchange, implementing the retry loop.
 * NOTE(review): only NOSDEL is declared durable; NOS and NOSDLX use the
 * library defaults -- confirm that is intended.
 */
async function buildQueues(channel: Channel) {
  const DELAY = 10 * 1000
  const BASE_NOS_KEY = "sim.nos.#"
  await channel.assertExchange(EXCHANGES.DEL, "topic")
  await channel.assertExchange(EXCHANGES.DLX, "topic")
  await channel.assertExchange(EXCHANGES.MAIN, "topic")
  await channel.assertQueue(QUEUES.NOS)
  await channel.assertQueue(QUEUES.NOSDLX)
  await channel.assertQueue(QUEUES.NOSDEL, {
    durable: true,
    arguments: {
      'x-message-ttl': DELAY,
      'x-dead-letter-exchange': EXCHANGES.MAIN,
    }
  })
  // Dead-letter queue
  await channel.bindQueue(QUEUES.NOSDLX, EXCHANGES.DLX, "sim.nos.#")
  // Delay queue
  await channel.bindQueue(QUEUES.NOSDEL, EXCHANGES.DEL, BASE_NOS_KEY)
  // NOS queue -> main exchange
  await channel.bindQueue(QUEUES.NOS, EXCHANGES.MAIN, BASE_NOS_KEY)
}
/** Connects the singleton event bus and returns it. */
export async function startRMQClient() {
  await rabbitmqEventBus.connect()
  return rabbitmqEventBus
}

View File

@@ -1,39 +0,0 @@
import { RabbitMQEventBus, RMQConnectionParams } from "sim-shared/infrastructure/RabbitMQEventBus.js"
import { env } from "./env"
const rmqUser = env.RABBITMQ_USER
const rmqPass = env.RABBITMQ_PASSWORD
const rmqHost = env.RABBITMQ_HOST
const rmqPort = Number(env.RABBITMQ_PORT)
const rmqSecure = false
const rmqVhost = env.RABBITMQ_VHOST
export const rmqConnOptions = <RMQConnectionParams>{
username: rmqUser,
password: rmqPass,
vhost: rmqVhost,
hostname: rmqHost,
port: rmqPort,
secure: rmqSecure,
}
export const rabbitmqEventBus = new RabbitMQEventBus({
connectionParams: rmqConnOptions
})
/**
 * Connects the event bus (with one naive retry on the first failure) and
 * declares/binds the NOS queue. Returns the connected bus.
 */
export async function startRMQClient() {
    try {
        await rabbitmqEventBus.connect()
    } catch (e) {
        // Single retry; a second failure propagates to the caller instead of
        // being silently swallowed.
        console.error("Error en la conexion RMQ", e)
        await rabbitmqEventBus.connect()
    }
    // Service-specific bindings; should eventually live inside the bus class.
    // BUG FIX: the original only logged and bound the queue inside `catch`,
    // i.e. when assertQueue FAILED. The bind must happen after a successful
    // assert so the queue actually receives "sim.nos.*" messages.
    await rabbitmqEventBus.channel?.assertQueue("sim.nos")
    await rabbitmqEventBus.channel?.bindQueue("sim.nos", "sim.exchange", "sim.nos.*")
    console.log("[i] Cola de sims de nos creada")
    return rabbitmqEventBus
}

View File

@@ -0,0 +1,18 @@
import { Pool, QueryResult } from 'pg';
import { PgClient } from 'sim-shared/infrastructure/PgClient.js'
import { env } from './env/env.js';
// Configuracion de la conexion a la BDD, deberia ser la
// Misma para todos los servicios pero hasta que se unifique todo
// se hace una por servicio.
// Connection pool for this service's Postgres database.
// NOTE(review): the 5433 fallback is non-standard (Postgres default is 5432)
// — presumably intentional for this deployment; confirm.
export const pgPool = new Pool({
    user: env.POSTGRES_USER,
    host: env.POSTGRES_HOST,
    database: env.POSTGRES_DATABASE,
    password: env.POSTGRES_PASSWORD,
    // Number(undefined) is NaN (falsy), so `||` also covers a missing var.
    port: Number(env.POSTGRES_PORT) || 5433,
});
// Shared query-client wrapper around the pool, consumed by the repositories.
export const pgClient = new PgClient({
    pool: pgPool
})

View File

@@ -0,0 +1,131 @@
/**
 * Request/response types for the NOS API. Every endpoint wraps its payload in
 * either an OkResponse (content set) or an ErrorResponse (error set).
 */
export namespace NosApi {
    export type ActivationData = {
        /**
        The unique physical subscriber identifier:
        Cellular - the ICCID
        Non - IP - the EUI
        Satellite - the IMEI
        */
        physicalId: string,
        /**
        example: 447000000001
        The unique network subscriber identifier:
        Cellular subscriber - the MSISDN
        Non - IP subscriber - the Device EUI
        Satellite subscriber - the Subscription ID
        */
        subscriberId: string,
        /**
        example: 9999
        If the subscriber uses Circuit Switched Data(CSD), this field displays its data number.If the subscriber does not use CSD, this field is null.
        */
        dataNumber: string
        /**
        example: 172.0.0.1
        The subscriber IP address.
        */
        ip: string
        /**
        example: 234150000000001
        The subscriber IMSI.
        */
        imsi: string
    }
    // Generic success envelope: `content` present, `error` absent.
    type OkResponse<T> = {
        error?: undefined | null,
        content: T
    }
    // Generic failure envelope: `error` present, `content` absent.
    type ErrorResponse<E = GeneralError> = {
        content?: undefined | null,
        error: E
    }
    type GeneralError = {
        children?: string[],
        code: string,
        message: string
    }
    export type ActivateResponseOK = OkResponse<ActivationData>
    export type ActivateResponseError = ErrorResponse
    export type ActivateResponse = ActivateResponseOK | ActivateResponseError
    export type LineDataResponseOK = OkResponse<LineData>
    export type LineDataResponseError = ErrorResponse
    export type LineDataResponse = LineDataResponseOK | LineDataResponseError
    export type PageDataResponseOk = OkResponse<LineData[]>
    // BUG FIX: this alias was OkResponse<LineData[]> (copy-paste), which made
    // PageResponse unable to represent a failure. Errors use ErrorResponse
    // like every other endpoint here.
    export type PageDataResponseError = ErrorResponse
    export type PageResponse = PageDataResponseOk | PageDataResponseError
    export type LineData = {
        physicalId: string
        subscriberId: string
        imsi: string
        nickname: string
        operatorCode: string
        tariffName: string
        lineRental: number
        contractLength: number
        isBarred: boolean
        isActive: boolean
        terminateDate: any // shape unconfirmed — TODO type properly
        groupId: number
        subscriberType: any // shape unconfirmed — TODO type properly
        connectionDate: string
        expiryDate: string
        networkState: NetworkState
        billingState: BillingState
        operatorName: string
        imei: string
        dataUsage?: number
        eid?: string
        smdpProvider?: string
        related?: {
            parent: RelatedItem,
            profiles: RelatedItem[]
        }
    }
    type RelatedItem = {
        physicalId: string
        isEnabledOnParent: boolean
    }
    export type NetworkState = {
        currentStateId: number
        currentState: string
        isTransferring: boolean
        lastTransferred: number
        isOnline: boolean
        lastSeenOnline: number
    }
    export type BillingState = {
        currentStateId: number
        currentState: string
    }
    export type BarData = {
        product: string
        description: string
        enabled: boolean
    }
    export type BarResponseOk = OkResponse<BarData>
    export type BarResponseError = ErrorResponse
    export type BarResponse = BarResponseOk | BarResponseError
}

View File

@@ -1,14 +1,74 @@
import { startRMQClient } from "#config/eventBusConfig"
import express from "express"
import cors from 'cors';
import { SimNosRouter } from "./aplication/SimNOS.router.js"
import { SimNosController } from "./aplication/SimNOS.controller.js"
import { SimNosUsecases } from "./aplication/SimNOS.usecases.js"
import { NosHttpClient } from "./infrastructure/NosHttpClient.js"
import { env } from "#config/env/env.js"
import { NosRepository } from "./infrastructure/NosRepository.js"
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js";
import { pgClient } from "#config/postgreConfig.js";
import { startRMQClient } from "#config/eventBus.config.js";
const RMQ_QUEUE = "sim.nos"
const NOS_BASE_URL = env.NOS_BASE_URL
const PORT = env.APP_PORT
const HOSTNAME = env.APP_HOST
/**
 * Bootstraps the NOS consumer: wires the HTTP client, repositories and
 * use-cases, subscribes handlers to the RMQ queue and exposes a small REST
 * API for line queries.
 */
async function startWorker() {
    // Dependency instantiation
    const rmqClient = await startRMQClient()
    const nosHttpClient = new NosHttpClient(
        NOS_BASE_URL
    )
    const nosRepository = new NosRepository(
        nosHttpClient
    )
    const orderRepository = new OrderRepository(
        pgClient
    )
    const simUsecases = new SimNosUsecases(
        nosHttpClient,
        nosRepository,
        orderRepository
    )
    const simController = new SimNosController(
        simUsecases,
        rmqClient
    )
    // NOTE(review): the "sim.nos" queue is consumed twice — here with
    // simController.recibeMsg and below with simRouter.route. This looks like
    // a refactor leftover; confirm which handler should own the queue.
    rmqClient.consume("sim.nos", simController.recibeMsg)
    const simRouter = new SimNosRouter(
        simController,
        rmqClient
    )
    // RMQ
    rmqClient.consume(RMQ_QUEUE, simRouter.route)
        .then(() => console.log("Cliente rmq creado con exito"))
        .catch(e => console.error("Error conectando con RABBITMQ", e))
    // Express
    const app = express()
    app.use(cors());
    app.use(express.json());
    app.use(express.urlencoded({ extended: true }));
    app.get("/select", simController.selectREST())
    app.get("/selectPage", simController.selectPageREST())
    // NOTE(review): listen callback receiving an error argument is an
    // Express 5 signature — confirm the installed express version supports it.
    app.listen(PORT, HOSTNAME, (e) => {
        if (e == undefined) {
            console.log("[o] Servidor iniciado en el puerto %d", PORT)
        } else {
            console.error("Error express ", e)
        }
    })
}
// NOTE(review): fire-and-forget — a rejection here becomes an unhandled
// promise rejection; consider startWorker().catch(...).
startWorker()

View File

@@ -0,0 +1,41 @@
import axios, { AxiosInstance } from "axios";
import { env } from "#config/env/env.js"
/**
 * Thin axios wrapper for the NOS API. A request interceptor stamps the fixed
 * headers (bearer token + JSON content type) on every outgoing call.
 */
export class NosHttpClient {
    public client: AxiosInstance;
    constructor(
        private baseURL: string,
        //private jwtManager: JWTProvider<any>
    ) {
        this.client = axios.create({
            baseURL: baseURL
        })
        // Interceptor that applies the NOS-specific fixed headers.
        this.client.interceptors.request.use(
            async (requestConfig) => {
                // NOS auth uses a single long-lived token from the env.
                const accessToken = env.NOS_ACCESS_TOKEN;
                requestConfig.headers.Authorization = `Bearer ${accessToken}`
                requestConfig.headers.set("content-type", "application/json")
                return requestConfig
            },
            (interceptorError) => Promise.reject(interceptorError)
        )
    }
    /** GET of the underlying axios instance. */
    get get() {
        return this.client.get
    }
    /** POST of the underlying axios instance. */
    get post() {
        return this.client.post
    }
    /** PATCH of the underlying axios instance. */
    get patch() {
        return this.client.patch
    }
}

View File

@@ -0,0 +1,177 @@
import { Result } from "sim-shared/domain/Result.js";
import { NosHttpClient } from "./NosHttpClient.js";
import { NosApi } from "#domain/NosAPI.js";
import axios, { AxiosError, AxiosResponse } from "axios";
/**
 * Repository over the NOS HTTP API: subscriber lookups, provisioning and
 * bar/unbar (temporary suspension) actions. Public methods never throw on
 * HTTP errors — they resolve to a Result with `error` set instead.
 */
export class NosRepository {
    constructor(
        private httpClient: NosHttpClient
    ) {
    }
    /**
     * Awaits a NOS request and folds the outcome into a Result.
     * E => error type (not applied yet — errors are stringified for now)
     * T => payload type for a 200 response
     *
     * TODO:
     * - Richer error handling (map status codes / NOS error bodies onto E)
     */
    private async manageNosRequest<E, T>(promise: Promise<AxiosResponse<T>>): Promise<Result<string, T>> {
        try {
            const res = await promise
            return {
                data: res.data
            }
        } catch (e) {
            if (axios.isAxiosError(e)) {
                const error = e as AxiosError
                return {
                    error: error.code + " : " + String(error.response?.statusText)
                }
            } else {
                return {
                    error: String(e)
                }
            }
        }
    }
    /** Fetches a single subscriber line by ICCID. */
    public async getLineInfo(iccid: string): Promise<Result<string, NosApi.LineData>> {
        // FIX: removed leftover debug log of the request path.
        const PATH = "/subscribers/" + iccid
        const lineRequest = this.httpClient.get<NosApi.LineDataResponseOK>(PATH)
        const lineResponse = await this.manageNosRequest<string, NosApi.LineDataResponseOK>(lineRequest)
        if (lineResponse.error != undefined) {
            return lineResponse
        } else {
            return {
                data: lineResponse.data.content
            }
        }
    }
    /**
     * Pages through subscriber lines.
     * NOS paging: max 100 per page, default 25; the response carries neither
     * the offset nor the remaining count, so the caller must track position.
     */
    public async getLinePage(args: {
        limit?: number,
        offset?: number,
        filter?: string,
        orderBy?: string
    }): Promise<Result<string, any>> {
        const PATH = "/subscribers"
        const LIMIT = 100
        const options = {
            limit: args.limit ?? LIMIT,
            offset: args.offset ?? 0,
            filter: args.filter,
            orderBy: args.orderBy
        }
        const pageRequest = this.httpClient.get(PATH, {
            params: options
        })
        const pageResponse = await this.manageNosRequest<string, any>(pageRequest)
        if (pageResponse.error != undefined) {
            return pageResponse
        } else {
            return {
                data: pageResponse.data.content
            }
        }
    }
    /**
     * Bulk ICCID lookup — not supported by NOS, so this always throws until
     * the paged workaround is implemented.
     * FIX: removed the unreachable code that followed the throw.
     */
    public async getLinesInfo(iccid: string[]) {
        throw new Error("NOS no permite buscar iccid en bulk, se puede hacer un apaño pero está en proceso")
    }
    /** Provisions (activates) a SIM. */
    public async activateSim(iccid: string): Promise<Result<string, NosApi.ActivationData>> {
        const PATH = '/provisioning'
        const PRODUCT_ID = 1330 // meaning unconfirmed — ask Ivan
        // NOTE(review): `iccid` is never sent in the request body — confirm
        // the NOS /provisioning payload; this looks like a missing field.
        const data = {
            productSetId: PRODUCT_ID
        }
        const req = this.httpClient.post<NosApi.ActivateResponseOK>(PATH, data)
        const resp = await this.manageNosRequest<string, NosApi.ActivateResponseOK>(req)
        if (resp.error != undefined) {
            return resp
        } else {
            return {
                data: resp.data.content
            }
        }
    }
    /**
     * Shared bar/unbar implementation: toggles the "BAR DN TOTAL" product.
     * FIX: bar() and unbar() were byte-identical except for the action string;
     * deduplicated into this helper.
     */
    private async setBarState(iccid: string, action: "enable" | "disable") {
        const PATH = `/subscribers/${iccid}/products`
        const data = {
            product: "BAR DN TOTAL",
            action: action
        }
        const req = this.httpClient.patch<NosApi.BarResponseOk>(PATH, data)
        const resp = await this.manageNosRequest<string, NosApi.BarResponseOk>(req)
        if (resp.error != undefined) {
            return resp
        } else {
            return {
                data: resp.data.content
            }
        }
    }
    /**
     * "A bar is a service provisioning action that results in a subscriber being blocked from accessing an operator's network. The bar remains in place until the operator is sent an unbar request."
     * A "bar" is understood to be a temporary suspension.
     */
    public async bar(iccid: string) {
        return this.setBarState(iccid, "enable")
    }
    /** Lifts a previously applied bar (reactivation). */
    public async unbar(iccid: string) {
        return this.setBarState(iccid, "disable")
    }
}

View File

@@ -1,17 +1,8 @@
{
"name": "sim-consumidor-nos",
"version": "1.0.0",
"type": "module",
"description": "consumidor generico de eventos de NOS",
"main": "index.ts",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"build": "yarn tsc --project tsconfig.json && yarn tsc-alias && cp package.json ../../dist/packages/sim-consumidor-nos/",
"esbuild": "esbuild index.ts --platform=node",
"start": "node ../../dist/packages/sim-consumidor-nos/index.js"
},
"author": "",
"license": "ISC",
"packageManager": "yarn@4.12.0",
"imports": {
"#config/*.js": {
"types": "./config/*.ts",
@@ -21,13 +12,13 @@
"types": "./config/*.ts",
"default": "./config/*.js"
},
"#adapters/*.js": {
"types": "./adapters/*.ts",
"default": "./adapters/*.js"
"#infrastructure/*.js": {
"types": "./infrastructure/*.ts",
"default": "./infrastructure/*.js"
},
"#adapters/*": {
"types": "./adapters/*.ts",
"default": "./adapters/*.js"
"#infrastructure/*": {
"types": "./infrastructure/*.ts",
"default": "./infrastructure/*.js"
},
"#domain/*.js": {
"types": "./domain/*.ts",
@@ -37,29 +28,32 @@
"types": "./domain/*.ts",
"default": "./domain/*.js"
},
"#ports/*.js": {
"types": "./ports/*.ts",
"default": "./ports/*.js"
"#aplication/*.js": {
"types": "./aplication/*.ts",
"default": "./aplication/*.js"
},
"#ports/*": {
"types": "./ports/*.ts",
"default": "./ports/*.js"
},
"#tests/*.js": {
"types": "./__tests__/*.ts",
"default": "./__tests__/*.js"
},
"#tests/*": {
"types": "./__tests__/*.ts",
"default": "./__tests__/*.js"
"#aplication/*": {
"types": "./aplication/*.ts",
"default": "./aplication/*.js"
}
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"build": "yarn tsc --project tsconfig.json && yarn tsc-alias && cp package.json ../../dist/packages/sim-consumidor-nos/",
"esbuild": "esbuild index.ts --platform=node",
"start": "node ../../dist/packages/sim-consumidor-nos/index.js",
"dev": "tsx watch index.ts"
},
"author": "",
"license": "ISC",
"packageManager": "yarn@4.12.0",
"dependencies": {
"@tsconfig/node22": "*",
"amqplib": "^0.10.9",
"cors": "*",
"dotenv": "*",
"express": "*",
"sim-shared": "sim-shared:*",
"typescript": "*"
},
"devDependencies": {
@@ -70,6 +64,7 @@
"@types/supertest": "*",
"prettier": "*",
"supertest": "*",
"tsc-alias": "^1.8.16",
"tsx": "*",
"vitest": "*"
}

View File

@@ -0,0 +1,11 @@
# NOS
## Particularidades de las operaciones de NOS
- Documentación de la API: [DOC](https://pelion-help.iot-x.com/nos/en/Content/API/APIReference/API%20Reference.htm?tocpath=_____7)
- No se necesita la pre-activación de las SIM.
- La suspensión y reactivación se llama "bar" y "unbar".
- El token de autenticación dura exactamente 1 año; solo se puede renovar
desde la web.
- En la documentación la URL de la API es <https://nos-api.iot-x.com> pero la
de producción es <https://nosconnectcenter-api.iot-x.com>.

View File

@@ -0,0 +1,5 @@
## ENV PARA DATOS DE TEST - shared nunca se lanza en produccion
NOTIFICATION_URL="https://sf-sim-activation.savefamilygps.net/send-activation-mail"
# NOTIFICATION_URL="localhost"
SIM_ACTIVATION_API_KEY=9e48c4ac-1ab0-4397-b3f3-6c239200dfe6

View File

@@ -2,16 +2,40 @@
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "../../dist",
"baseUrl": ".",
"rootDir": "../../",
"paths": {
"#config/*": [
"./config/*"
],
"#infrastructure/*": [
"./infrastructure/*"
],
"#domain/*": [
"./domain/*"
],
"#aplication/*": [
"./aplication/*"
],
"config/*": [
"./config/*"
],
"infrastructure/*": [
"./infrastructure/*"
],
"domain/*": [
"./domain/*"
]
}
},
"exclude": [
"node_modules"
],
"include": [
"**/*.ts",
"src/**/*.d.ts"
"**/*.d.ts",
"../../packages/sim-shared/**/*.ts"
],
"files": [
"index.ts"
]
}
}

View File

@@ -1,8 +1,12 @@
# claves de Objenious
HOST=0.0.0.0
OBJ_PEM_PATH=./obj.pem
OBJ_AUTHORIZATION=XOc7FtwXD8hUX2SFVX94XSty8wkOmChkwDNF09O_aIxPubMDdFUdCDCB4zpzSIxi8nOcTg7r_LM_nmd5qm7uLbksf_XArjI8iAyhjKz_2BAXPhmvKs4Fc9f3vv5LDfCVrPB9lP8P7rJ66_qnWs4jvhLQxSfn29m96hgXeCf8oySdIDUjN2q9Js3KAS5LL52Ri6ryvUeO1PvMhaPQMWRqoHIqTV1wPfPtiqQwcjUPmu5GeW164Kq1JLgV3KaGzfCZ9Qv9lbv30EJrukXxWuLCAhBS0kzrBXZoWvf2pb9uh3Am_93_dDxiIGQfIap9ZU_m8ZD1HPgvZOMCY6ZkxQconQ
OBJ_CLI_ASSERTION=XOc7FtwXD8hUX2SFVX94XSty8wkOmChkwDNF09O_aIxPubMDdFUdCDCB4zpzSIxi8nOcTg7r_LM_nmd5qm7uLbksf_XArjI8iAyhjKz_2BAXPhmvKs4Fc9f3vv5LDfCVrPB9lP8P7rJ66_qnWs4jvhLQxSfn29m96hgXeCf8oySdIDUjN2q9Js3KAS5LL52Ri6ryvUeO1PvMhaPQMWRqoHIqTV1wPfPtiqQwcjUPmu5GeW164Kq1JLgV3KaGzfCZ9Qv9lbv30EJrukXxWuLCAhBS0kzrBXZoWvf2pb9uh3Am_93_dDxiIGQfIap9ZU_m8ZD1HPgvZOMCY6ZkxQconQ
OBJ_CLIENT_ID=savefamily_rest_ws
OBJ_KID=xNfbMiyL1ORXGP8lElhcv8nVaG3EJKye4Lc1YoN3I1E
OBJ_BASE_URL=https://api-getway.objenious.com/ws
OBJ_CUSTOMER_CODE=9.49411.10
# OBJ_BASE_URL=https://api-getway.objenious.com/ws/test

View File

@@ -0,0 +1,121 @@
import { describe, it, beforeEach, mock, after } from "node:test";
import assert from "node:assert";
import { SimController } from "./Sim.controller.js";
import { EventBus } from "sim-shared/domain/EventBus.port.js";
import { SimUseCases } from "./Sim.usecases.js";
import { ConsumeMessage } from "amqplib";
import { postgrClient, pgPool } from "#config/postgreConfig.js";
import { httpInstance } from "#config/httpClient.config.js";
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js";
import { PauseCancelTaskRepository } from "#adapters/PauseCancelTaskRepository.js";
import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js";
import { ActionData } from "#domain/DTOs/objeniousapi.js";
import { ObjeniousLinesRepository } from "sim-shared/infrastructure/ObjeniousLinesRepository.js";
// Integration suite: real SimUseCases wired to the real DB repositories, with
// ONLY the event bus mocked so ack/nack behaviour can be asserted.
describe("SimController Integration Tests (Real UseCases)", () => {
    let eventBusMock: any;
    let controller: SimController;
    let useCases: SimUseCases;
    beforeEach(() => {
        // Mock ONLY the event bus as requested
        eventBusMock = {
            publish: mock.fn(),
            addSubscribers: mock.fn(),
            consume: mock.fn(),
            ack: mock.fn(async () => { }),
            nack: mock.fn(async () => { }),
        };
        const operationRepository = new ObjeniousOperationsRepository(
            httpInstance,
            postgrClient,
        );
        const orderRepository = new OrderRepository(postgrClient);
        const pauseRepository = new PauseCancelTaskRepository(postgrClient);
        const linesRepository = new ObjeniousLinesRepository(postgrClient) // must point at the "intranet" DB
        useCases = new SimUseCases({
            httpClient: httpInstance,
            operationRepository: operationRepository,
            orderRepository: orderRepository,
            pauseRepository: pauseRepository,
            objeniousLinesRepository: linesRepository
        });
        // Stub the private lookup so the test does not depend on real line data.
        // @ts-expect-error
        useCases.findActivationDate = async (data: ActionData) => new Date()
        controller = new SimController(eventBusMock as unknown as EventBus, useCases);
    });
    // Builds a ConsumeMessage-shaped object from a plain payload.
    const createMockMsg = (payload: any): ConsumeMessage => {
        return {
            content: Buffer.from(JSON.stringify(payload)),
            fields: {},
            properties: {
                headers: {
                    message_id: "test-correlation-id"
                }
            },
        } as unknown as ConsumeMessage;
    };
    after(async () => {
        // Release the pool so the node:test process can exit.
        await pgPool.end();
    });
    describe("suspend", () => {
        it("should call stage_suspend and interact with DB and EventBus", async () => {
            const iccid = "test-iccid-suspend-" + Date.now();
            const msg = createMockMsg({
                key: "sim.test.pause",
                payload: {
                    iccid: iccid
                },
                headers: {
                    message_id: "correlation-suspend-" + iccid
                }
            });
            const handler = controller.suspend();
            await handler(msg);
            // Verify that it reached the stage_suspend logic (which adds to pauseRepository)
            // We can query the DB or check if ACK was called
            assert.strictEqual(eventBusMock.ack.mock.callCount(), 1, "Message should be ACKed on success");
            assert.strictEqual(eventBusMock.nack.mock.callCount(), 0, "Message should not be NACKed");
        });
    });
    describe("terminate", () => {
        it("should call stage_terminate and interact with DB and EventBus", async () => {
            const iccid = "test-iccid-terminate-" + Date.now();
            const msg = createMockMsg({
                key: "sim.test.pause",
                payload: {
                    iccid: iccid
                },
                headers: {
                    message_id: "correlation-terminate-" + iccid
                }
            });
            const handler = controller.terminate();
            await handler(msg);
            assert.strictEqual(eventBusMock.ack.mock.callCount(), 1, "Message should be ACKed on success");
            assert.strictEqual(eventBusMock.nack.mock.callCount(), 0, "Message should not be NACKed");
        });
    });
    describe("Error Handling", () => {
        it("should nack if message is invalid", async () => {
            // Malformed JSON body: the handler must reject.
            const msg = {
                content: Buffer.from("invalid json"),
                fields: {},
                properties: {},
            } as unknown as ConsumeMessage;
            const handler = controller.suspend();
            await assert.rejects(handler(msg), "Error de suspension consumiendo el mensaje no es valido");
        });
    });
});

View File

@@ -3,6 +3,10 @@ import { ConsumeMessage } from "amqplib";
import { SimUseCases } from "./Sim.usecases.js";
import { SimEvents } from "sim-shared/domain/SimEvents.js";
import { Result } from "sim-shared/domain/Result.js";
import { ActionData } from "#domain/DTOs/objeniousapi.js";
import { Request, Response } from "express"
import { PaginationArgs, QueryPaginationArgs } from "sim-shared/domain/PaginationArgs.js";
import { paginationValidator } from "./httpValidators.js";
/**
* La clase usa generadores de funciones para mantener el contexto
@@ -20,7 +24,6 @@ export class SimController {
) {
this.eventBus = eventBus
this.useCases = useCases
}
private decodeMsg(msg: ConsumeMessage): object | undefined {
@@ -36,6 +39,7 @@ export class SimController {
} catch (error) {
console.error('Error al decodificar JSON:', error);
console.error(Buffer.from(msg.content).toString(("utf8")))
// Aquí podrías decidir devolver el string crudo o null
return undefined;
}
@@ -64,6 +68,8 @@ export class SimController {
}
public activate() {
const DUE_DATE_SECONDS = 2 * 60
return async (msg: ConsumeMessage) => {
let msgData;
try {
@@ -80,9 +86,10 @@ export class SimController {
throw new Error("Error activando la sim, no se ha especificado la oferta")
}
this.tryUseCase(msg, this.useCases.activate({
dueDate: this.genDueDate(2 * 60).toISOString(),
customerAccountCode: "9.49411.10", // TODO: Al .env
const resp = await this.tryUseCase(msg, this.useCases.activate({
correlation_id: msgData.headers?.message_id,
dueDate: this.genDueDate(DUE_DATE_SECONDS).toISOString(),
customerAccountCode: "9.49411.10",
identifier: {
identifierType: "ICCID",
identifiers: [iccid]
@@ -92,6 +99,11 @@ export class SimController {
services: []
}
}))
// TODO:
// - Crear un registro de operación
// - Si ha salido bien id de operación -> webhook?
// - Si ha salido mal notificar solo cuando se manda a dlx ??
}
}
@@ -99,7 +111,7 @@ export class SimController {
return async (msg: ConsumeMessage) => {
let msgData;
try {
msgData = this.validateMsg(msg) as SimEvents.pause
msgData = this.validateMsg(msg) as SimEvents.suspend
} catch (e) {
throw new Error("Error de preactivacion consumiendo el mensaje no es valido" + String(e))
}
@@ -109,7 +121,8 @@ export class SimController {
}
const iccid = msgData.payload.iccid
this.tryUseCase(msg, this.useCases.preActivate({
const res = await this.tryUseCase(msg, this.useCases.preActivate({
correlation_id: msgData.headers?.message_id,
dueDate: this.genDueDate(2 * 60).toISOString(),
identifier: {
identifierType: "ICCID",
@@ -125,7 +138,7 @@ export class SimController {
return async (msg: ConsumeMessage) => {
let msgData;
try {
msgData = this.validateMsg(msg) as SimEvents.pause
msgData = this.validateMsg(msg) as SimEvents.suspend
} catch (e) {
throw new Error("Error de reactivacion consumiendo el mensaje no es valido" + String(e))
}
@@ -135,7 +148,8 @@ export class SimController {
}
const iccid = msgData.payload.iccid
this.tryUseCase(msg, this.useCases.suspend({
const res = await this.tryUseCase(msg, this.useCases.reActivate({
correlation_id: msgData.headers?.message_id,
dueDate: this.genDueDate(2 * 60).toISOString(),
identifier: {
identifierType: "ICCID",
@@ -146,11 +160,14 @@ export class SimController {
}
}
/**
* Lo mismo que pause
*/
public suspend() {
return async (msg: ConsumeMessage) => {
let msgData;
try {
msgData = this.validateMsg(msg) as SimEvents.pause
msgData = this.validateMsg(msg) as SimEvents.suspend
} catch (e) {
throw new Error("Error de suspension consumiendo el mensaje no es valido" + String(e))
}
@@ -160,13 +177,18 @@ export class SimController {
}
const iccid = msgData.payload.iccid
this.tryUseCase(msg, this.useCases.suspend({
const suspendData: ActionData = {
correlation_id: msgData.headers?.message_id,
dueDate: this.genDueDate(2 * 60).toISOString(),
identifier: {
identifierType: "ICCID",
identifiers: [iccid]
identifiers: [iccid] // Por algún motivo solo he puesto un iccd por identifier
}
}))
}
const useCaseRes = await this.tryUseCase(msg, this.useCases.stage_suspend(suspendData))
/*
const res = await this.tryUseCase(msg, this.useCases.suspend(actionData))
*/
}
}
@@ -175,7 +197,7 @@ export class SimController {
return async (msg: ConsumeMessage) => {
let msgData;
try {
msgData = this.validateMsg(msg) as SimEvents.pause
msgData = this.validateMsg(msg) as SimEvents.suspend
} catch (e) {
throw new Error("Error consumiendo el mensaje no es valido" + String(e))
}
@@ -183,15 +205,50 @@ export class SimController {
if (msgData == undefined) {
return Promise.reject("Mensaje invalido")
}
const iccid = msgData.payload.iccid
console.log("Mensaje procesado", String(msgData))
this.tryUseCase(msg, this.useCases.terminate({
const terminateActionData: ActionData = {
correlation_id: msgData.headers?.message_id,
dueDate: this.genDueDate(2 * 60).toISOString(),
identifier: {
identifierType: "ICCID",
identifiers: [iccid]
}
}))
}
//const res = await this.tryUseCase(msg, this.useCases.terminate(terminateActionData))
const res = await this.tryUseCase(msg, this.useCases.stage_terminate(terminateActionData))
}
}
public queryLines() {
const DEFAULT_LIMIT = 1000
const DEFAULT_OFFSET = 0
return async (req: Request, res: Response) => {
const queryParams = req.query
const paginationArgs: QueryPaginationArgs = {
limit: queryParams.limit as string | undefined,
offset: queryParams.offset as string | undefined
}
const validationRes = paginationValidator.validate(paginationArgs)
if (validationRes.error != undefined) {
res.status(402).json(validationRes)
return;
}
const paginationValues = {
limit: (queryParams.limit != undefined) ? Number(queryParams.limit) : DEFAULT_LIMIT,
offset: (queryParams.offset != undefined) ? Number(queryParams.offset) : DEFAULT_OFFSET
}
const status = req.query.status
const queryRes = await this.useCases.getLinesByQuery({ status: status as string | undefined }, paginationValues)
res.json(queryRes)
}
}
@@ -213,3 +270,4 @@ export class SimController {
return dueDate
}
}

View File

@@ -18,8 +18,8 @@ export class SimRouter {
this.routes = new Map([
["activate", this.simController.activate()],
["pause", this.simController.suspend()],
["cancel", this.simController.terminate()], // terminate
["reActivate", this.simController.reActivate()],
["cancel", this.simController.terminate()],
["reactivate", this.simController.reActivate()],
["preActivate", this.simController.preActivate()]
]);
}
@@ -27,6 +27,8 @@ export class SimRouter {
/**
* Enruta el mensaje a la acción correspondiente basándose en la routing key
* TODO: No estoy seguro que deba meter el nack aqui
* - De moemento el ack-nack se gestiona en los controller, por si acaso hay casos
* limite en
*/
public route = async (msg: ConsumeMessage | null): Promise<void> => {
if (!msg) {

View File

@@ -3,6 +3,13 @@ import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js"
import { AxiosError } from "axios"
import { Result } from "sim-shared/domain/Result.js"
import { ObjeniousOperation, IOperationsRepository as OperationsRepositoryPort } from "sim-shared/domain/operationsRepository.port.js"
import assert from "node:assert"
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"
import { CreatePauseCancelTaskDTO, PauseCancelTaskRepository } from "#adapters/PauseCancelTaskRepository.js"
import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js"
import { ObjeniousLinesRepository } from "sim-shared/infrastructure/ObjeniousLinesRepository.js"
import { error } from "node:console"
import { ObjeniousLine, ObjeniousLineDb } from "sim-shared/domain/objeniousLine.js"
// TODO:
// - Pasar a un archivo de DTOs
@@ -10,26 +17,123 @@ import { ObjeniousOperation, IOperationsRepository as OperationsRepositoryPort }
export class SimUseCases {
private readonly httpClient: HttpClient
private readonly operationRepository: OperationsRepositoryPort
private readonly objeniousRepository: ObjeniousOperationsRepository
private readonly orderRepository: OrderRepository
private readonly pauseRepository: PauseCancelTaskRepository
private readonly objeniousLinesRepository: ObjeniousLinesRepository
constructor(args: {
httpClient: HttpClient,
operationRepository: OperationsRepositoryPort
operationRepository: ObjeniousOperationsRepository,
orderRepository: OrderRepository,
pauseRepository: PauseCancelTaskRepository,
objeniousLinesRepository: ObjeniousLinesRepository
}) {
this.httpClient = args.httpClient
this.operationRepository = args.operationRepository
this.objeniousRepository = args.operationRepository
this.orderRepository = args.orderRepository
this.pauseRepository = args.pauseRepository
this.objeniousLinesRepository = args.objeniousLinesRepository
}
private async logOperation(data: ObjeniousOperation) {
await this.operationRepository.createOperation({
await this.objeniousRepository.createOperation({
...data
})
}
/**
* Garantiza el flujo de todos los casos de uso de:
* - Petición según la acción
* - Control de errores
* - Siempre devuelve un Result
* - Almacena la operacion en la base de datos
* - Actualiza el estado del order
*
* Necesita:
* - Mas control según el codigo de error
*/
private generateUseCase<
PAYLOAD,
RESPONSETYPE extends { requestId: string }
>(args: {
correlation_id?: string,
url: string,
operation: string,
operationPayload: PAYLOAD,
iccid: string
onError?: (_: any) => void
// on code response??
}): () => Promise<Result<string, boolean>> {
return async () => {
const req = this.httpClient.client.post<RESPONSETYPE>(args.url, {
...args.operationPayload
})
try {
const response = await req;
if (response.status == 200) {
assert(response.data.requestId != undefined)
// Creacion de la operacion inicial, antes de tener los datos
const operation: ObjeniousOperation = {
operation: args.operation,
iccids: String(args.iccid),
status: "noMassID",
request_id: response.data.requestId,
correlation_id: args.correlation_id
}
// TODO: Esto tiene poco sentido si la operacion ya se
// tenia que haber creado en el generador
this.logOperation(operation)
.then().catch(e => console.error("Error login operation", e))
if (args.correlation_id != undefined) {
this.orderRepository.updateOrder({
correlation_id: args.correlation_id!,
new_status: "running", // Siempre es runing la primera vez que se consume
})
.then(e => console.log("Order actualizado: ", e))
.catch(e => console.error("Error actualizando order", args.correlation_id))
}
return <Result<string, boolean>>{
error: undefined,
data: true
}
} else {
return {
error: String(response.status),
data: undefined
}
}
} catch (error) {
console.error(`[Sim.usecase] Error ${args.operation}`, (error as AxiosError).response?.status)
return {
error: "Error general de la peticion",
data: undefined
}
}
}
}
public activate(activationData: ActivationData): () => Promise<Result<string, boolean>> {
const OPERATION_URL = "/actions/activateLine"
return async () => {
const iccid = activationData.identifier.identifiers
// Comporbación excepcional para saber si la linea está suspendida
const statusLinea = await this.objeniousRepository.getLinesAPI("ICCID", [String(iccid)])
if (statusLinea.data != undefined && statusLinea.data[0].status.networkStatus == "SUSPENDED") {
const res = await this.reActivate(activationData)()
return res;
}
const req = this.httpClient.client.post(OPERATION_URL, {
...activationData
dueDate: activationData.dueDate,
identifier: activationData.identifier,
customerAccountCode: activationData.customerAccountCode,
offer: activationData.offer
})
try {
@@ -51,8 +155,6 @@ export class SimUseCases {
error: undefined,
data: true
}
} else {
// muy mejorable el control de errores
return {
@@ -82,6 +184,7 @@ export class SimUseCases {
if (resp.status == 200) {
console.log("Sim preactivada con exito", resp.data)
const operation: ObjeniousOperation = {
correlation_id: preActivateData.correlation_id,
operation: "preActivate",
iccids: String(preActivateData.identifier.identifiers),
status: "noMassID",
@@ -96,38 +199,59 @@ export class SimUseCases {
} else {
return <Result<string, boolean>>{
error: String(resp.status),
data: true
data: undefined
}
}
} catch (error) {
console.error("Error preactivacion", preActivateData)
return <Result<string, boolean>>{
error: "Error preactivando la sim" + preActivateData.identifier,
data: true
data: undefined
}
}
}
}
/**
 * Requests the reactivation of a suspended line.
 *
 * Returns a thunk so the caller decides when the HTTP request fires.
 * On HTTP 200 the Result carries data=true; any other status or a thrown
 * error yields a Result with `error` set and `data` undefined.
 *
 * Reconstructed: the original text interleaved the removed implementation
 * (old signature/param name and the old unconditional success return) with
 * the new one; this is the new version.
 */
public reActivate(reactivateData: ActionData): () => Promise<Result<string, boolean>> {
    const OPERATION_URL = "/actions/reactivateLine"
    return async () => {
        const req = this.httpClient.client.post(OPERATION_URL, {
            ...reactivateData
        })
        try {
            const response = await req
            // Initial operation record, created before the final result is known.
            const operation: ObjeniousOperation = {
                operation: "reactivate",
                iccids: reactivateData.identifier.identifiers[0],
                status: "noMassID",
                request_id: response.data.requestId,
                correlation_id: reactivateData.correlation_id
            }
            // TODO: this makes little sense if the operation should already
            // have been created by the generator.
            // Fire-and-forget: a logging failure must not abort the reactivation.
            this.logOperation(operation)
                .then().catch(e => console.error("Error login operation", e))
            if (response.status == 200) {
                console.log("[o] Sim solicitud de reactivacion ", response.data)
                return <Result<string, boolean>>{
                    error: undefined,
                    data: true
                }
            } else {
                return {
                    error: String(response.status),
                    data: undefined
                }
            }
        } catch (error) {
            console.error("[x] Error reactivacion", (error as AxiosError).response?.status)
            return <Result<string, boolean>>{
                error: "Error reactivando la sim" + reactivateData.identifier,
                data: undefined
            }
        }
    }
}
@@ -135,54 +259,233 @@ export class SimUseCases {
/**
 * Suspends a line via the generic operation use case.
 *
 * Reconstructed: the original text still contained the removed direct-POST
 * lines (`const req = this.httpClient.client.post(...)` — a dangling request
 * that was never awaited) interleaved with the new `generateUseCase` call;
 * this keeps only the new path.
 */
public suspend(suspendData: ActionData): () => Promise<Result<string, boolean>> {
    const OPERATION_URL = "/actions/suspendLine"
    return async () => {
        return this.generateUseCase({
            correlation_id: suspendData.correlation_id,
            operationPayload: {
                dueDate: suspendData.dueDate,
                identifier: suspendData.identifier
            },
            url: OPERATION_URL,
            iccid: suspendData.identifier.identifiers[0], // first ICCID identifies the operation
            operation: "suspend"
        })
    }
}
/**
 * Very specific helper: returns the line's activation date or, failing that,
 * the current time, so callers know when the line's test period completes.
 */
private async findActivationDate(actionData: ActionData) {
    const identifiers = actionData.identifier.identifiers
    const lineData = await this.objeniousRepository.getLinesAPI("ICCID", identifiers)
    // If the line data cannot be fetched the error is only reported, not
    // propagated: it may be an Objenious-side problem and must not cancel
    // the operation. The fallback is "now".
    if (lineData.error != undefined) {
        console.error(lineData.error)
        return new Date()
    }
    const rawActivationDate = lineData.data[0].status.activationDate
    if (rawActivationDate == undefined || rawActivationDate == "") {
        return new Date()
    }
    return new Date(rawActivationDate)
}
/**
 * Pre-step before the suspension to avoid errors while the billing is still
 * in test: instead of calling Objenious directly, the suspension is queued
 * as a pause/cancel task that is retried until the line can be suspended.
 *
 * Reconstructed: the original text mixed removed lines of the old
 * direct-POST implementation (try/await req) with the new task-queue flow;
 * this keeps only the new flow.
 */
public stage_suspend(suspendData: ActionData): () => Promise<Result<string, boolean>> {
    return async (): Promise<Result<string, boolean>> => {
        const correlation_id = suspendData.correlation_id
        const iccid = suspendData.identifier.identifiers
        // NOTE(review): `operation` is unused while logOperation below stays
        // disabled; kept for when the logging is re-enabled.
        const operation: ObjeniousOperation = {
            operation: "suspend",
            iccids: iccid[0],
            status: "running",
            correlation_id: correlation_id
        }
        // Not registered until it has gone through the pause table.
        // this.logOperation(operation)
        //     .then().catch(e => console.error("Error login operation", e))

        // Marks the related order (when there is one) as failed.
        const fail = (error: string) => {
            console.error("[Sim.usecases]", error)
            if (correlation_id != undefined) {
                this.orderRepository.updateOrder({
                    correlation_id: correlation_id,
                    new_status: "failed"
                })
            }
        }
        // TODO: register the order
        /*
        if (correlation_id != undefined) {
            await this.orderRepository.createOrder({
                correlation_id: correlation_id,
                order_type: "pause"
            })
        }
        */
        let activationDate;
        try {
            activationDate = await this.findActivationDate(suspendData)
        } catch (e) {
            return {
                error: String(e)
            }
        }
        const newTask: CreatePauseCancelTaskDTO = {
            iccid: iccid[0],
            activation_date: activationDate,
            next_check: undefined, // run immediately, as it is the first check
            operation_type: "suspend",
            action_data: suspendData
        }
        const taskCreated = await this.pauseRepository.addTask(newTask)
        // Case: the task could not be created in the DB
        if (taskCreated.error != undefined) {
            fail(taskCreated.error)
            return {
                error: taskCreated.error
            }
        }
        // Case: created in the DB → the order is now running
        if (correlation_id != undefined) {
            this.orderRepository.updateOrder({
                correlation_id: correlation_id,
                new_status: "running"
            })
        }
        return {
            data: true
        }
    }
}
/**
 * Pre-step before the termination to avoid errors while the billing is still
 * in test. (The original comment said "suspension" — a copy-paste from
 * stage_suspend; this method queues a *terminate* task.)
 */
public stage_terminate(terminateData: ActionData): () => Promise<Result<string, boolean>> {
    return async (): Promise<Result<string, boolean>> => {
        const correlation_id = terminateData.correlation_id
        const iccid = terminateData.identifier.identifiers[0]
        // Activation date (or "now" as fallback) marks when the line's test
        // period ends.
        const activationDate = await this.findActivationDate(terminateData)
        const newTask: CreatePauseCancelTaskDTO = {
            iccid: iccid,
            activation_date: activationDate,
            next_check: undefined, // run immediately, as it is the first check
            operation_type: "terminate",
            action_data: terminateData
        }
        const taskCreated = await this.pauseRepository.addTask(newTask)
        // NOTE(review): `operation` is currently unused because the logging
        // below is disabled; kept for when logOperation is re-enabled.
        const operation: ObjeniousOperation = {
            operation: "terminate",
            iccids: iccid,
            status: "running",
            correlation_id: correlation_id
        }
        /**
        this.logOperation(operation)
            .then().catch(e => console.error("Error login operation", e))
        */
        // Case: the task could not be created in the DB
        if (taskCreated.error != undefined) {
            console.error("[Sim.usecases]", taskCreated.error)
            if (correlation_id != undefined) {
                this.orderRepository.updateOrder({
                    correlation_id: correlation_id,
                    new_status: "failed"
                })
            }
            return {
                error: taskCreated.error
            }
        }
        // Case: it was created in the DB → the order is now running
        if (correlation_id != undefined) {
            this.orderRepository.updateOrder({
                correlation_id: correlation_id,
                new_status: "running"
            })
        }
        return {
            data: true
        }
    }
}
/**
 * Terminates (cancels) a line via the generic operation use case.
 *
 * Reconstructed: the original text interleaved the removed direct-POST lines
 * with the new `generateUseCase` call, plus a disabled-for-now throw.
 * NOTE(review): the throw is placed first so termination stays disabled, as
 * the "TODO: para cuando estemos listos" comment indicates — confirm the
 * intended ordering relative to the generateUseCase call.
 */
public terminate(terminationData: ActionData): () => Promise<Result<string, boolean>> {
    const OPERATION_URL = "/actions/terminateLine"
    return async () => {
        // TODO: for when we are ready — termination is irreversible.
        throw new Error("Peticion no reversible desactivada de momento")
        return this.generateUseCase({
            correlation_id: terminationData.correlation_id,
            operationPayload: {
                dueDate: terminationData.dueDate,
                identifier: terminationData.identifier
            },
            url: OPERATION_URL,
            iccid: terminationData.identifier.identifiers[0],
            operation: "terminate"
        })
    }
}
/**
 * Computes how long a line has been in suspension.
 *
 * @param iccid ICCID of the line to inspect.
 * @returns Result with the accumulated suspension time in milliseconds and
 *          in days, or an error message when the lookup fails.
 */
public async getSuspendedTime(iccid: string):
    Promise<Result<string, { total_milliseconds: number, total_days: number }>> {
    try {
        const result = await this.objeniousRepository.getSuspendedTime(iccid);
        if (result.error !== undefined) {
            return { error: result.error as string, data: undefined };
        }
        // Explicit guard instead of the previous `result.data!` non-null
        // assertions: a repository returning neither error nor data now
        // yields a clear message instead of a TypeError swallowed below.
        if (result.data == undefined) {
            return { error: "Error getting suspended time", data: undefined };
        }
        return {
            data: {
                total_milliseconds: result.data.total_milliseconds,
                total_days: result.data.total_days
            }
        };
    } catch (error) {
        console.error("[Sim.usecases] Error getting suspended time", error);
        return { error: "Error getting suspended time", data: undefined };
    }
}
try {
const e = await req
console.log("Sim cancelada con exito", e.data)
return <Result<string, boolean>>{
error: undefined,
data: true
}
} catch (error) {
console.error("Error pausa", error)
return <Result<string, boolean>>{
error: "Error cancelando/terminate la sim" + terminationData.identifier,
data: undefined
}
/**
 * Searches lines **in our local dump** matching a query, with pagination.
 * Wraps any repository failure into the Result error channel.
 */
public async getLinesByQuery(query: { status?: string | undefined }, pagination: { limit: number, offset: number })
    : Promise<Result<string, {
        data: ObjeniousLineDb[],
        offset: number,
        rowCount: number
    }>> {
    try {
        // assumes getLinesByStatus returns { data, offset, rowCount } — TODO confirm
        return {
            data: await this.objeniousLinesRepository.getLinesByStatus(query, pagination),
        }
    } catch (e) {
        return {
            error: String(e)
        }
    }
}
}

View File

@@ -0,0 +1,18 @@
import { BodyValidator, Validator } from "sim-shared/aplication/BodyValidator.js";
import { QueryPaginationArgs } from "sim-shared/domain/PaginationArgs.js";

/** Accepts `limit` when it is undefined (defaults to 0) or a non-negative number. */
const limitPositiveOrUndefined = <Validator<QueryPaginationArgs>>{
    field: "limit",
    validationFunc: (args) => (args.limit == undefined || !isNaN(+args.limit) && parseInt(args.limit) >= 0),
    errorMsg: "El campo limit debe ser un numero o undefined (default 0)"
}

/**
 * Accepts `offset` when it is undefined (defaults to 0) or a non-negative number.
 * FIX: the original check was `isNaN(+args.offset) && parseInt(args.offset) >= 1`,
 * which rejected every defined numeric offset (the `!` was missing — for a
 * number, isNaN is false and the whole expression short-circuits to false),
 * and the `>= 1` bound contradicted the "(default 0)" error message.
 */
const offsetPositiveOrUndefined = <Validator<QueryPaginationArgs>>{
    field: "offset",
    validationFunc: (args) => (args.offset == undefined || !isNaN(+args.offset) && parseInt(args.offset) >= 0),
    errorMsg: "El campo offset debe ser un numero o undefined (default 0)"
}

/** Validates the query-string pagination arguments (limit/offset). */
export const paginationValidator = new BodyValidator<QueryPaginationArgs & {}>([
    limitPositiveOrUndefined,
    offsetPositiveOrUndefined
])

View File

@@ -4,7 +4,10 @@ import path from "node:path";
loadEnvFile(path.join("../../.env")) // Global
loadEnvFile(path.join("./.env")) // base
export const env = {
PORT: parseInt(process.env.OBJENIOUS_CONSUMER_PORT || "3002"),
ENVIRONMENT: process.env.ENVIORMENT,
POSTGRES_USER: process.env.POSTGRES_USER,
POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD,
@@ -28,7 +31,7 @@ export const env = {
OBJ_CLI_ASSERTION: String(process.env.OBJ_CLI_ASSERTION),
OBJ_CLIENT_ID: String(process.env.OBJ_CLIENT_ID),
OBJ_KID: String(process.env.OBJ_KID),
OBJ_BASE_URL: String(process.env.OBJ_BASE_URL)
OBJ_BASE_URL: String(process.env.OBJ_BASE_URL),
OBJ_CUSTOMER_CODE: String(process.env.OBJ_CUSTOMER_CODE)
};

View File

@@ -18,24 +18,26 @@ export const rmqConnOptions = <RMQConnectionParams>{
secure: rmqSecure,
}
export const QUEUES = {
OBJ: "sim.objenious",
OBJDLX: "sim.objenious.dlx",
OBJDEL: "sim.objenious.delayed",
}
export const EXCHANGES = {
MAIN: "sim.exchange",
DLX: "sim.ex.objenious.dlx",
DEL: "sim.ex.objenious.delayed"
}
export const rabbitmqEventBus = new RabbitMQEventBus({
connectionParams: rmqConnOptions,
buildStructure: buildQueues,
maxRetry: 5
maxRetry: 5,
delayedExchange: EXCHANGES.DEL,
dlxExchange: EXCHANGES.DLX
})
async function buildQueues(channel: Channel) {
const QUEUES = {
OBJ: "sim.objenious",
DLX: "sim.objenious.dlx",
DEL: "sim.objenious.delayed"
}
const EXCHANGES = {
MAIN: "sim.exchange",
DLX: "sim.ex.objenious.dlx",
DEL: "sim.ex.objenious.delayed"
}
const DELAY = 10 * 1000
const BASE_OBENIOUS_KEY = "sim.objenious.#"
@@ -45,8 +47,8 @@ async function buildQueues(channel: Channel) {
await channel.assertExchange(EXCHANGES.MAIN, "topic")
await channel.assertQueue(QUEUES.OBJ)
await channel.assertQueue(QUEUES.DLX)
await channel.assertQueue(QUEUES.DEL, {
await channel.assertQueue(QUEUES.OBJDLX)
await channel.assertQueue(QUEUES.OBJDEL, {
durable: true,
arguments: {
'x-message-ttl': DELAY,
@@ -55,9 +57,9 @@ async function buildQueues(channel: Channel) {
})
// Cola dead-letter
await channel.bindQueue(QUEUES.DLX, EXCHANGES.DLX, "sim.objenious.#")
await channel.bindQueue(QUEUES.OBJDLX, EXCHANGES.DLX, "sim.objenious.#")
// Cola delay
await channel.bindQueue(QUEUES.DEL, EXCHANGES.DEL, BASE_OBENIOUS_KEY)
await channel.bindQueue(QUEUES.OBJDEL, EXCHANGES.DEL, BASE_OBENIOUS_KEY)
// Cola objenious -> main exchange
await channel.bindQueue(QUEUES.OBJ, EXCHANGES.MAIN, BASE_OBENIOUS_KEY)

View File

@@ -1,6 +1,6 @@
import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js"
import { JWTService } from "../aplication/JWT.service.js"
import { env } from "./env/index.js"
import { jwtService } from "./jwtService.config.js"
const OBJ_BASE_URL = env.OBJ_BASE_URL
@@ -9,5 +9,5 @@ export const httpInstance = new HttpClient({
headers: {
"content-type": " application/json; charset=utf-8"
},
jwtManager: new JWTService()
jwtManager: jwtService
})

View File

@@ -0,0 +1,20 @@
/**
 * Postgres client for the intranet database. It exists only because the data
 * dump needs it; if it gets used anywhere else, something is wrong with the
 * design.
 */
import { Pool } from 'pg';
import { PgClient } from 'sim-shared/infrastructure/PgClient.js'
import { env } from './env/index.js';

// Fall back to the default Postgres port when POSTGRES_PORT is unset or not numeric.
const intranetPort = Number(env.POSTGRES_PORT) || 5432;

export const pgPoolIntranet = new Pool({
    host: env.POSTGRES_HOST,
    port: intranetPort,
    database: "intranet",
    user: env.POSTGRES_USER,
    password: env.POSTGRES_PASSWORD,
});

export const postgresClientIntranet = new PgClient({ pool: pgPoolIntranet })

View File

@@ -0,0 +1,59 @@
import { GrantAccessRequestBody, JWTService } from "sim-shared/aplication/JWT.service.js"
import { env } from "./env/index.js"
import { JWTHeader } from "sim-shared/domain/JWT.js"

// Path to the RSA private key used to sign the client-assertion JWT.
const PRIVATE_KEY_PATH = env.OBJ_PEM_PATH
const GET_TOKEN_URL = "https://idp.docapost.io/auth/realms/GETWAY/protocol/openid-connect/token"
// The IdP uses the same endpoint for the initial grant and for refreshes.
const REFRESH_TOKEN_URL = GET_TOKEN_URL

// client_credentials grant authenticated with a signed JWT assertion.
const DEFAULT_BODY: GrantAccessRequestBody = {
    grant_type: "client_credentials",
    client_id: env.OBJ_CLIENT_ID,
    client_assertion_type: "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
    client_assertion: env.OBJ_CLI_ASSERTION
}

const DEFAULT_HEADERS = {
    "content-type": "application/x-www-form-urlencoded"
}

const DEFAULT_HEADERS_JWT = {
    alg: "RS256",
    typ: "JWT",
    kid: env.OBJ_KID,
}

// NOTE(review): jti here is computed once at module load, so every token built
// from this default payload would share it; addIATHeaders below regenerates a
// fresh jti per call — confirm no code path relies on DEFAULT_DATA_JWT.jti.
const DEFAULT_DATA_JWT = {
    sub: env.OBJ_CLIENT_ID,
    iss: env.OBJ_CLIENT_ID,
    aud: "https://idp.docapost.io/auth/realms/GETWAY",
    jti: Date.now().toString(),
}

/**
 * Completes the JWT claims with per-request values: issued-at, a 5-minute
 * expiry, and a fresh jti. (FIX: parameter typed `object` instead of the
 * boxed `Object` wrapper type, which is a TypeScript anti-pattern.)
 */
function addIATHeaders(authHeaders: object) {
    const headers = <JWTHeader>{
        ...authHeaders,
        sub: env.OBJ_CLIENT_ID,
        iss: env.OBJ_CLIENT_ID,
        aud: GET_TOKEN_URL,
        jti: Date.now().toString(),
        iat: Math.floor(Date.now() / 1000),
        exp: Math.floor(Date.now() / 1000) + 5 * 60,
    }
    return headers
}

/** Shared JWT service wired with the Objenious/Docapost IdP settings above. */
export const jwtService = new JWTService({
    transformJWTHeaders: addIATHeaders,
    defaultHeaders: DEFAULT_HEADERS,
    defaultBody: DEFAULT_BODY,
    defaultJWTHeaders: DEFAULT_HEADERS_JWT,
    defaultJWTPayload: DEFAULT_DATA_JWT,
    privateKeyPath: PRIVATE_KEY_PATH,
    tokenUrl: GET_TOKEN_URL,
    refreshTokenUrl: REFRESH_TOKEN_URL
})

View File

@@ -1,5 +1,6 @@
export type ActionData = {
correlation_id?: string;
dueDate: string, // isodate
filter?: {} // no se si hace falta
identifier: {

View File

@@ -1,5 +1,5 @@
import { OperationsRepository } from "sim-shared/infrastructure/OperationRepository.js"
import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js"
import { startRMQClient } from "#config/eventBus.config.js"
import { httpInstance } from "#config/httpClient.config.js"
import { pgPool } from "#config/postgreConfig.js"
@@ -7,6 +7,15 @@ import { PgClient } from "sim-shared/infrastructure/PgClient.js"
import { SimUseCases } from "./aplication/Sim.usecases.js"
import { SimController } from "./aplication/Sim.controller.js"
import { SimRouter } from "./aplication/Sim.router.js"
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"
import { PauseCancelTaskRepository } from "#adapters/PauseCancelTaskRepository.js"
import express from "express"
import cors from "cors"
import assert from "node:assert";
import { env } from "#config/env/index.js"
import { ObjeniousLinesRepository } from "sim-shared/infrastructure/ObjeniousLinesRepository.js"
import { postgresClientIntranet } from "#config/intranetPostgresConfig.js"
async function startWorker() {
const rmqClient = await startRMQClient()
@@ -17,19 +26,83 @@ async function startWorker() {
await pgClient.checkDatabaseConnection()
const operationRepository = new OperationsRepository(pgClient)
const simActivationController = new SimController(
rmqClient,
new SimUseCases({
httpClient: httpClient,
operationRepository: operationRepository
})
const operationRepository = new ObjeniousOperationsRepository(
httpClient,
pgClient,
)
const simRouter = new SimRouter(simActivationController, rmqClient)
const orderRepository = new OrderRepository(pgClient)
const pauseRepository = new PauseCancelTaskRepository(pgClient)
const linesRepository = new ObjeniousLinesRepository(
postgresClientIntranet
)
const simUseCases = new SimUseCases({
httpClient: httpClient,
operationRepository: operationRepository,
orderRepository: orderRepository,
pauseRepository: pauseRepository,
objeniousLinesRepository: linesRepository
})
const simController = new SimController(
rmqClient,
simUseCases
)
const simRouter = new SimRouter(simController, rmqClient)
// de momento solo una cola por simplificar
rmqClient.consume("sim.objenious", simRouter.route)
// Servidor express
const port = env.PORT
const app = express()
app.use(cors())
app.use(express.json())
app.get("/health", async (req, res) => {
res.json({ ok: "true" })
})
// TODO: meter el template de controller con los validadores
app.get("/lines/:iccid/suspended-time", async (req, res) => {
const iccid = req.params.iccid
if (!iccid) {
res.status(400).json({ error: "iccid is required" })
return
}
const result = await simUseCases.getSuspendedTime(iccid)
if (result.error !== undefined) {
res.status(500).json({ error: result.error })
return
}
res.json(result) // {data:{...}} || {error:{...}}
})
/**
* Opciones query:
* - state
*
* Respuestas:
* - OK: data: {
* lines: ObjeniousLineDb[],
* offset: number,
* rowCount: number
* }
*
* - ERR: error: {
* message: string
* }
*/
app.get("/lines", simController.queryLines())
assert.ok(port, "Puerto del servicio no definido")
app.listen(port, () => {
console.log(`[o] HTTP server listening on port ${port}`)
})
}
startWorker()

View File

@@ -0,0 +1,72 @@
import { after, before, describe, it } from "node:test";
import { CreatePauseCancelTaskDTO, PauseCancelTaskRepository } from "./PauseCancelTaskRepository.js";
import { postgrClient } from "#config/postgreConfig.js";
import assert from "node:assert";

// Shared fixture; next_check = now so the row is immediately "pending".
const testTask: CreatePauseCancelTaskDTO = {
    iccid: "1234",
    operation_type: "suspend",
    activation_date: new Date(),
    next_check: new Date(),
    action_data: {
        dueDate: new Date().toString(),
        correlation_id: "12223",
        identifier: {
            identifiers: ["1234"],
            identifierType: "ICCID"
        }
    }
}

describe("Test PauseCancelTaskRepository - DB", () => {
    // Ids of rows created during the run, so later tests can reference them.
    const createdIds: number[] = [];
    const pauseRepo = new PauseCancelTaskRepository(postgrClient)
    // TODO: clean up the rows these tests insert — they are left in the DB.
    before(() => {
    })
    after(() => {
    })
    it("Should create a task", async () => {
        const created = await pauseRepo.addTask(testTask)
        assert.ok(created != undefined, "A value must be returned always")
        assert.ok(created.error == undefined, "Should not return a error")
        assert.ok(created.data != undefined, "Data must be returned")
        createdIds.push(created.data.id)
    })
    it("Should update a existing task", async () => {
        const updated = await pauseRepo.updateTask({
            id: createdIds[0],
            next_check: new Date()
        })
        assert.ok(updated != undefined, "A value must be returned always")
        assert.ok(updated.error == undefined, "Should not return a error")
        assert.ok(updated.data != undefined, "Data must be returned")
    })
    it("Should finish a existing task", async () => {
        const finish = await pauseRepo.finishTask({
            id: createdIds[0],
            error: "ok"
        })
        assert.ok(finish != undefined, "A value must be returned always")
        assert.ok(finish.error == undefined, "Should not return a error")
        assert.ok(finish.data != undefined, "Data must be returned")
    })
    it("Should get at least 1 pending task", async () => {
        const created = await pauseRepo.addTask(testTask)
        // FIX: the setup insert's result was previously ignored.
        assert.ok(created.error == undefined, "Setup insert should not fail")
        const pending = await pauseRepo.getPending()
        assert.ok(pending != undefined, "A value must be returned always")
        assert.ok(pending.error == undefined, "Should not return a error")
        assert.ok(pending.data != undefined, "Data must be returned")
        // FIX: the test name promised "at least 1" but nothing asserted it;
        // the task inserted above has next_check <= NOW(), so it is pending.
        assert.ok(pending.data.length >= 1, "At least one pending task expected")
    })
})

View File

@@ -0,0 +1,128 @@
import { Result } from "sim-shared/domain/Result.js";
import { QueryResult } from "pg";
import { PgClient } from "sim-shared/infrastructure/PgClient.js";
import { ActionData } from "#domain/DTOs/objeniousapi.js";

/** Row shape of the pause_cancel_tasks table. */
export type PauseCancelTask = {
    id: number;
    iccid: string;
    operation_type: "suspend" | "terminate",
    last_checked?: Date | null;
    activation_date?: Date | null;
    next_check?: Date | null;
    completed_date?: Date | null;
    error?: string | null;
    action_data: ActionData
}

export type CreatePauseCancelTaskDTO = Pick<PauseCancelTask, "iccid" | "activation_date" | "next_check" | "operation_type" | "action_data">
export type UpdatePauseCancelTaskDTO = Pick<PauseCancelTask, "id" | "next_check">
export type FinishPauseCancelTaskDTO = Pick<PauseCancelTask, "id" | "error">

/**
 * Repository that compensates for Objenious cancellation/pause problems when
 * the operation targets a line whose billing is still in test.
 */
export class PauseCancelTaskRepository {
    constructor(
        private readonly pgClient: PgClient
    ) {
    }

    /**
     * Extracts a readable message from an unknown thrown value.
     * FIX: errors thrown here come from pg, not axios — the previous
     * `(e as AxiosError).message` cast lied about the error's type.
     */
    private static errorMessage(e: unknown): string {
        return e instanceof Error ? e.message : String(e)
    }

    /**
     * Returns the tasks that can be launched now; there may be more rows,
     * but those are not yet due (next_check in the future).
     */
    public async getPending(): Promise<Result<string, PauseCancelTask[]>> {
        const sql = `
            SELECT * FROM pause_cancel_tasks
            WHERE completed_date IS NULL
            AND (next_check <= NOW() OR next_check IS NULL)
            ORDER BY id ASC;
        `;
        try {
            const res: QueryResult<PauseCancelTask> = await this.pgClient.query(sql);
            return {
                data: res.rows
            }
        } catch (e) {
            return {
                error: PauseCancelTaskRepository.errorMessage(e)
            }
        }
    }

    /** Inserts a new pause/cancel task; last_checked is set to now(). */
    public async addTask(task: CreatePauseCancelTaskDTO): Promise<Result<string, PauseCancelTask>> {
        const sql = `
            INSERT INTO pause_cancel_tasks (iccid, activation_date, next_check, last_checked, operation_type, action_data)
            VALUES ($1, $2, $3, now(), $4, $5)
            RETURNING *;
        `;
        try {
            const values = [task.iccid, task.activation_date, task.next_check, task.operation_type, JSON.stringify(task.action_data)];
            const res: QueryResult<PauseCancelTask> = await this.pgClient.query(sql, values);
            return {
                data: res.rows[0]
            }
        } catch (e) {
            return {
                error: PauseCancelTaskRepository.errorMessage(e)
            }
        }
    }

    /**
     * The task was re-checked but the line is still in test: records the
     * check and schedules the next one.
     */
    public async updateTask(updateData: UpdatePauseCancelTaskDTO): Promise<Result<string, PauseCancelTask>> {
        const sql = `
            UPDATE pause_cancel_tasks
            SET last_checked = now(), next_check = $1
            WHERE id = $2
            RETURNING *;
        `;
        try {
            const res = await this.pgClient.query<PauseCancelTask>(sql, [updateData.next_check, updateData.id]);
            return {
                data: res.rows[0]
            }
        } catch (e) {
            return {
                error: PauseCancelTaskRepository.errorMessage(e)
            }
        }
    }

    /**
     * The task finished, successfully or not; `error` records the outcome.
     * FIX: return type and query row type are now explicit, matching the
     * other methods.
     */
    public async finishTask(finishData: FinishPauseCancelTaskDTO): Promise<Result<string, PauseCancelTask>> {
        const sql = `
            UPDATE pause_cancel_tasks
            SET completed_date = NOW(), error = $1
            WHERE id = $2
            RETURNING *;
        `;
        try {
            const res = await this.pgClient.query<PauseCancelTask>(sql, [finishData.error, finishData.id]);
            return {
                data: res.rows[0]
            }
        } catch (e) {
            return {
                error: PauseCancelTaskRepository.errorMessage(e)
            }
        }
    }
}

// FIX: the file previously did `export default PauseCancelTask`, which
// exports a *type* as a value and does not compile; the class was meant.
export default PauseCancelTaskRepository

View File

@@ -13,10 +13,10 @@
"types": "./config/*.ts",
"default": "./config/*.js"
},
"#shared/*.js": {
"sim-shared/*.js": {
"default": "../sim-shared/*.js"
},
"#shared/*": {
"sim-shared/*": {
"default": "../sim-shared/*.js"
},
"#adapters/*.js": {
@@ -53,7 +53,7 @@
}
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"test": "node --import tsx --test ./**/*.test.ts",
"dev": "tsx watch index.ts",
"build": "tsc --build && yarn tsc-alias -p tsconfig.json && cp .env package.json ../../dist/packages/sim-consumidor-objenious/",
"start": "node ../../dist/packages/sim-consumidor-objenious/index.js",
@@ -68,7 +68,6 @@
"cors": "*",
"dotenv": "*",
"express": "*",
"sim-consumidor-objenious": "sim-consumidor-objenious:*",
"sim-shared": "sim-shared:*",
"typescript": "*"
},

View File

@@ -9,6 +9,10 @@
],
"include": [
"**/*.ts",
"**/*.d.ts",
"../../packages/sim-shared/**/*.ts"
],
"files": [
"index.ts"
]
}

View File

@@ -0,0 +1,127 @@
import { BodyValidator } from "sim-shared/aplication/BodyValidator.js"
import { OrderUsecases } from "./Order.usecases.js"
import { Request, Response } from "express"
import { idValidator, uuidValidator } from "./httpValidators.js"
import { PaginationArgs } from "sim-shared/domain/PaginationArgs.js"
/**
 * HTTP controller for order endpoints. Each public method returns an Express
 * handler built by `controllerGenerator`.
 */
export class OrderController {
    private orderUseCases: OrderUsecases

    constructor(args: {
        orderUseCases: OrderUsecases
    }) {
        this.orderUseCases = args.orderUseCases
    }

    /** Handler: fetch an order by its numeric id. */
    public getById() {
        return this.controllerGenerator<{ id: number }, { id: number }>({
            validator: idValidator,
            useCase: this.orderUseCases.getById(),
            onError: (data, error) => { console.error(error) },
            onSuccess: (data) => console.log(data)
        })
    }

    /** Handler: list pending orders (no validation; pagination passthrough). */
    public getPending() {
        return this.controllerGenerator<PaginationArgs, PaginationArgs>({
            validator: undefined,
            useCase: this.orderUseCases.getPending(),
            onError: (data, error) => { console.error(error) },
            onSuccess: (data) => console.log(data)
        })
    }

    /** Handler: fetch an order by its queue correlation id (UUID). */
    public getByQueueId() {
        return this.controllerGenerator<{ correlation_id: string }, { correlation_id: string }>({
            validator: uuidValidator,
            useCase: this.orderUseCases.getByQueueId(),
            onError: (data, error) => { console.error(error) },
            onSuccess: (data) => console.log(data)
        })
    }

    /**
     * TODO:
     * - Still being validated; it has several problems.
     * - Copied from elsewhere; consider injecting it.
     * - Map for the response?
     *
     * Abstracts the pipeline:
     * Request -> body validation -> body mapping -> useCase -> OK/ERR
     *
     * <O> is the original (raw) body type.
     * <P> is the body type after mapping.
     *
     * FIXES: the validation and mapping catch blocks previously responded
     * 422 but did NOT return, so the use case still ran and tried to respond
     * a second time (ERR_HTTP_HEADERS_SENT). Also removed the `.send()`
     * calls chained after `.json()`: res.json() already ends the response.
     */
    public controllerGenerator<O extends object, P extends object>(args: {
        validator?: BodyValidator<O>,
        mapBody?: (body: O) => P,
        useCase: (args: P) => Promise<any>,
        onError: (args: O | P, error: string) => void,
        onSuccess: (args: P) => void,
    }) {
        return async (req: Request, res: Response) => {
            // sketchy: route params overwrite identically-named body fields
            const body = { ...req.body, ...req.params }
            // 1. Body validation
            try {
                if (args.validator != undefined)
                    args.validator.validate(body)
            } catch (e) {
                if (args.onError != undefined) args.onError(body, e as string)
                res.status(422).json({
                    errors: {
                        msg: e
                    }
                })
                return; // stop: do not run the use case with an invalid body
            }
            // 2. Body transformation
            let data: P = body;
            try {
                if (args.mapBody != undefined)
                    data = args.mapBody(body)
            } catch (e) {
                res.status(422).json({
                    errors: {
                        msg: "Error parseando el body: " + e
                    }
                })
                return; // stop: same fall-through bug as above
            }
            // 3. Run the use case
            try {
                const usecaseResult = await args.useCase(data)
                // 4.1 Success, but nothing found
                if (usecaseResult.data == undefined && usecaseResult.error == undefined) {
                    res.status(404).json(usecaseResult)
                    args.onSuccess(data)
                    return;
                }
                // 4.2 Controlled error coming from the use case
                if (usecaseResult.error != undefined) {
                    res.status(500).json(usecaseResult)
                    return;
                }
                // 4.3 Success: found
                res.status(200).json(usecaseResult)
                args.onSuccess(data)
            } catch (err) {
                // 4.4 Unexpected use-case error
                res.status(500).json({
                    errors: {
                        msg: "Error general:" + err
                    }
                })
                return;
            }
        }
    }
}

Some files were not shown because too many files have changed in this diff Show More