Compare commits
135 Commits
c40876af77
...
WEBINT-175
| Author | SHA1 | Date | |
|---|---|---|---|
| 30f1819a4e | |||
| 964ea6add9 | |||
| 602878acf4 | |||
| 0aa52feaac | |||
| 15b70309da | |||
| 7001fccbf7 | |||
| cffee785b2 | |||
| 33d260310c | |||
| e359acc1d5 | |||
| bb4bce4a6d | |||
| eac74ef0cd | |||
| 1dc4eb5648 | |||
| a35a6c2b60 | |||
| 1f78f4a3e1 | |||
| 1e98559f3a | |||
| ef0f860b9d | |||
| 0bff55379f | |||
| 4d34308a13 | |||
| 70bf73b0a4 | |||
| e3849d8217 | |||
| d9854a12a8 | |||
| 48d387a8da | |||
| 93d3e13793 | |||
| 031f5d5cf0 | |||
| 047669bab2 | |||
| 5ea5939e3a | |||
| 7ff3f13af4 | |||
| a9589f578b | |||
| a27e4b30d2 | |||
| 4168949b9e | |||
| e6ff54a15d | |||
| 3956797020 | |||
| 7d88359263 | |||
| 1b6da651a6 | |||
| 9b305f887f | |||
| 9506b9e28e | |||
| 61c0edca07 | |||
| 9470b5605d | |||
| cbbc0f6edb | |||
| 9d63d23754 | |||
| a95655a2a6 | |||
| 025801a689 | |||
| 28880c4d99 | |||
| 5bb3bc554b | |||
| cfb907b840 | |||
| d5d7953fd2 | |||
| 96298aab25 | |||
| c17cca1e81 | |||
| 7264efcf79 | |||
| 8934bcd603 | |||
| bdd08dbc56 | |||
| 7d47fde806 | |||
| ad207fb732 | |||
| bd9081b5bc | |||
| a429e9d14a | |||
| 81eb986313 | |||
| 58bedc42f1 | |||
| b97f422261 | |||
| 7a7dc33724 | |||
| 7743bd1f0d | |||
| 2897d7aa3c | |||
| 0fd7eafcf3 | |||
| 71253d216e | |||
| aeea6cfefd | |||
| e8eb925834 | |||
| 7cf9cc60e6 | |||
| 1e9818d430 | |||
| 39c0e87758 | |||
| 5771972e2a | |||
| ea13403dc3 | |||
| 8d9a9b84b8 | |||
| 9b92f3506b | |||
| 1798118f6b | |||
| eba2b8c569 | |||
| b6b2cf6cc8 | |||
| a0faa2d105 | |||
| d323f804fc | |||
| 978454754c | |||
| b6091b15da | |||
| a6794a061b | |||
| fafea3ce04 | |||
| 992f639f35 | |||
| f57309b06a | |||
| 3be2b8f20d | |||
| 4853fec7ff | |||
| 04a6e50b7a | |||
| 8ca3d095e6 | |||
| ca1144b55c | |||
| 18422fbe38 | |||
| f221035c8b | |||
| 02c80cd503 | |||
| c416114c50 | |||
| e329b36933 | |||
| 5c64c84e2a | |||
| fc319372be | |||
| 12dae135b5 | |||
| b208c9c301 | |||
| 1583ae539e | |||
| b6ec37c339 | |||
| 459523666f | |||
| 8427613114 | |||
| 5d3465fd97 | |||
| 39a2622cb1 | |||
| 0a42e4776d | |||
| 44fea21a56 | |||
| 8a53fb6749 | |||
| 3a55e5f057 | |||
| 6bc1340930 | |||
| 2299d5f735 | |||
| e454a3fee1 | |||
| 672de92e5b | |||
| 404f142c7e | |||
| 8fcf592b88 | |||
| d91e7e746c | |||
| f176788267 | |||
| c4721328f8 | |||
| e878c9a27e | |||
| d17e399a4c | |||
| 068ec20a9b | |||
| b32e1af71c | |||
| fe5b492218 | |||
| a217914ec6 | |||
| 669abd3c43 | |||
| 43cef09f89 | |||
| 0a9470b1d8 | |||
| 6c6916c4cb | |||
| 39f8708fca | |||
| 4df80f6171 | |||
| 5296f58cef | |||
| ecd5190713 | |||
| 71e0ec959c | |||
| 46ac54f7ab | |||
| 2c9bf9dd93 | |||
| 19b2958a9c | |||
| a39b84e107 |
19
.env
19
.env
@@ -1,4 +1,5 @@
|
||||
PORT=3000
|
||||
API_HOSTNAME=0.0.0.0
|
||||
RABBITMQ_USER=guest
|
||||
RABBITMQ_PASSWORD=guest
|
||||
|
||||
@@ -13,11 +14,19 @@ RABBITMQ_SECURE=false
|
||||
RABBITMQ_VHOST=sim-vhost
|
||||
|
||||
# Hay cosas que unificar de varios servicios
|
||||
POSTGRES_DB=postgres
|
||||
POSTGRES_DATABASE=postgres
|
||||
#POSTGRES_HOST=postgresql-sim
|
||||
POSTGRES_HOST=localhost
|
||||
POSTGRES_PORT=5432
|
||||
DEV_POSTGRES_PORT=5432
|
||||
POSTGRES_DB=postgres
|
||||
POSTGRES_DATABASE=postgres
|
||||
POSTGRES_PORT=5433
|
||||
POSTGRES_USER=postgres
|
||||
POSTGRES_PASSWORD=1234
|
||||
POSTGRES_PASSWORD='1234'
|
||||
|
||||
# Para el postgres local para generar el script de resultado de migraciones
|
||||
PGHOST=localhost
|
||||
PGUSER=alvar
|
||||
PGPASSWORD=alvar
|
||||
PGPORT=5433
|
||||
|
||||
# Proxy
|
||||
CONNECTIONS_URL=https://sim-connections.savefamilygps.net
|
||||
|
||||
@@ -3,3 +3,9 @@ compressionLevel: mixed
|
||||
enableGlobalCache: false
|
||||
|
||||
nodeLinker: node-modules
|
||||
|
||||
npmScopes:
|
||||
sf-alvar:
|
||||
npmRegistryServer: "https://git.savefamilygps.net/api/packages/SaveFamily/npm/"
|
||||
|
||||
npmRegistryServer: "https://registry.npmjs.org/"
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
#/bin/bash
|
||||
rm deployment/database/init.sql
|
||||
cat deployment/database/*.sql >deployment/database/init.sql
|
||||
|
||||
docker compose -f deployment/local/docker/docker-compose.yaml --project-directory ./ build
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
# stage base para coordinar las fases de build y ejecucion
|
||||
FROM node:22-alpine AS base
|
||||
WORKDIR /usr/local/app
|
||||
COPY ./package.json ./yarn.lock ./
|
||||
COPY ./package.json ./
|
||||
#COPY ./package.json ./yarn.lock ./
|
||||
RUN corepack enable && \
|
||||
corepack prepare yarn@4.12.0 --activate
|
||||
# copia el codigo en general
|
||||
|
||||
@@ -4,16 +4,18 @@ CREATE TYPE status_enum AS ENUM ('noRequestID','noMassID','running','finished','
|
||||
-- Tabla para gestionar las peticiones de cambio de objenious.
|
||||
-- Para una o mas lineas se pueden lanzar operacione que no sabemos
|
||||
-- con certeza cuando van a terminar.
|
||||
-- Estas tablas está fuertemente ligadas al sistema que usa la plataforma
|
||||
-- de objenioius y no debe unsarse para otra compañia.
|
||||
CREATE TABLE if not exists objenious_operation (
|
||||
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
retry_count INT DEFAULT 0,
|
||||
max_retry INT DEFAULT 5,
|
||||
max_date_retry TIMESTAMP DEFAULT NULL,
|
||||
retry_count INT DEFAULT 0, -- No implementado en codigo
|
||||
max_retry INT DEFAULT 5, -- No implementado en codigo
|
||||
max_date_retry TIMESTAMP DEFAULT NULL, -- No implementado en codigo
|
||||
iccids TEXT,
|
||||
request_id TEXT,
|
||||
mass_action_id TEXT,
|
||||
operation TEXT NOT NULL,
|
||||
start_date TIMESTAMP NOT NULL DEFAULT now(),
|
||||
start_date TIMESTAMP NOT NULL DEFAULT now(),
|
||||
last_change_date TIMESTAMP NOT NULL DEFAULT now(),
|
||||
end_date TIMESTAMP,
|
||||
error TEXT,
|
||||
@@ -24,7 +26,7 @@ CREATE TABLE if not exists objenious_operation (
|
||||
-- operaciones pendientes para revisar
|
||||
CREATE INDEX IF NOT EXISTS pending_operations
|
||||
ON objenious_operation(start_date)
|
||||
WHERE end_date IS NULL;
|
||||
WHERE end_date IS NULL;
|
||||
|
||||
CREATE TABLE if not exists objenious_operation_change (
|
||||
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
20
deployment/database/base/xx-volcado-objenious.sql
Normal file
20
deployment/database/base/xx-volcado-objenious.sql
Normal file
@@ -0,0 +1,20 @@
|
||||
CREATE table if not exists objenious_lines (
|
||||
id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
simId BIGINT UNIQUE,
|
||||
status TEXT,
|
||||
iccid TEXT NOT NULL,
|
||||
msisdn TEXT,
|
||||
imei TEXT,
|
||||
imeiChangeDate TIMESTAMPTZ,
|
||||
offerCode TEXT,
|
||||
preactivationDate TIMESTAMPTZ, -- No viene con hora
|
||||
activationDate TIMESTAMPTZ,
|
||||
commercialStatus TEXT,
|
||||
commercialStatusDate TIMESTAMPTZ,
|
||||
billingStatus TEXT,
|
||||
billingStatusChangeDate TIMESTAMPTZ,
|
||||
billingActivationDate TIMESTAMPTZ,
|
||||
createDate TIMESTAMPTZ,
|
||||
raw JSONB,
|
||||
hash TEXT
|
||||
)
|
||||
106
deployment/database/generateSchema.sh
Executable file
106
deployment/database/generateSchema.sh
Executable file
@@ -0,0 +1,106 @@
|
||||
#!/bin/bash
|
||||
# --- Para que siempre se ejecute en el mismo path
|
||||
cd "$(dirname "$0")"
|
||||
|
||||
# --- Configuración por defecto ---
|
||||
MIGRATIONS_DIR="./migrations"
|
||||
OUTPUT_FILE_PREFIX="esquema_final"
|
||||
DB_NAME="temp_schema_build_$(date +%s)"
|
||||
|
||||
# --- Función de Ayuda ---
|
||||
usage() {
|
||||
echo "Uso: $0 -v <version> [-e <ruta_env>]"
|
||||
echo " -v Versión semántica objetivo (ej: 1.2.0)"
|
||||
echo " -e (Opcional) Ruta al archivo .env para cargar variables"
|
||||
echo " Los archivos de verions tienen que tener el formato x.x.x_descripcion.sql (Es importante la _ para serpar las partes) "
|
||||
exit 1
|
||||
}
|
||||
|
||||
# --- Procesar Argumentos (Flags) ---
|
||||
# v: obligatorio
|
||||
# e: opcionar
|
||||
while getopts "v:e:" opt; do
|
||||
case $opt in
|
||||
v) TARGET_VERSION="$OPTARG" ;;
|
||||
e) ENV_PATH="$OPTARG" ;;
|
||||
*) usage ;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Validar que la versión esté presente
|
||||
if [ -z "$TARGET_VERSION" ]; then
|
||||
echo "Error: La versión es obligatoria."
|
||||
usage
|
||||
fi
|
||||
|
||||
# --- Cargar variables de entorno ---
|
||||
if [ ! -z "$ENV_PATH" ]; then
|
||||
if [ -f "$ENV_PATH" ]; then
|
||||
echo "~> Cargando configuración desde: $ENV_PATH"
|
||||
# Exporta automáticamente las variables definidas en el archivo
|
||||
set -o allexport
|
||||
source "$ENV_PATH"
|
||||
set +o allexport
|
||||
else
|
||||
echo "Error: No se encontró el archivo .env en: $ENV_PATH"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "!> No se especificó archivo .env, usando variables del sistema actual"
|
||||
fi
|
||||
|
||||
# echo "Debug: Usuario es '$PGUSER'"
|
||||
# echo "Debug: Host es '$PGHOST'"
|
||||
# echo "Debug: Password es '$PGPASSWORD'" # Cuidado con mostrar esto
|
||||
|
||||
# --- Función de limpieza (Safety Net) ---
|
||||
cleanup() {
|
||||
echo "~> Limpiando: Eliminando base de datos temporal '$DB_NAME'"
|
||||
# Usamos las variables de conexión cargadas (si las hay)
|
||||
dropdb $DB_NAME --if-exists 2>/dev/null
|
||||
}
|
||||
trap cleanup EXIT
|
||||
|
||||
# --- Inicio del Proceso ---
|
||||
echo "~> Iniciando build para versión: $TARGET_VERSION"
|
||||
|
||||
# 1. Crear BD temporal
|
||||
# Nota: Si tu .env tiene PGHOST, la BD se creará allí. Si no, en localhost.
|
||||
createdb $DB_NAME
|
||||
|
||||
# 2. Ejecutar script base (si existe)
|
||||
rm -rf init.sql
|
||||
cat base/*.sql >init.sql
|
||||
if [ -f "init.sql" ]; then
|
||||
echo "~> Ejecutando init.sql..."
|
||||
psql -d $DB_NAME -f init.sql >/dev/null
|
||||
fi
|
||||
|
||||
# 3. Iterar y filtrar migraciones
|
||||
echo "~> Aplicando migraciones hasta la versión $TARGET_VERSION..."
|
||||
|
||||
for f in $(ls $MIGRATIONS_DIR/*.sql | sort -V); do
|
||||
FILENAME=$(basename "$f")
|
||||
# Extraer versión (Asume formato V1.0.0_desc.sql o 1.0.0_desc.sql)
|
||||
FILE_VER=$(echo "$FILENAME" | sed -E 's/^V//' | awk -F_ '{print $1}')
|
||||
|
||||
# Comparación semántica
|
||||
echo "comparando $TARGET_VERSION con $FILE_VER"
|
||||
LOWEST=$(echo -e "$TARGET_VERSION\n$FILE_VER" | sort -V | head -n1)
|
||||
|
||||
if [ "$LOWEST" == "$FILE_VER" ] || [ "$FILE_VER" == "$TARGET_VERSION" ]; then
|
||||
echo "~> Aplicando: $FILENAME ($FILE_VER)"
|
||||
psql -d $DB_NAME -f "$f" >/dev/null
|
||||
else
|
||||
echo "~> Saltando: $FILENAME ($FILE_VER) - Mayor que objetivo"
|
||||
fi
|
||||
done
|
||||
|
||||
# 4. Generar nombre de archivo de salida
|
||||
OUTPUT_FILE="${OUTPUT_FILE_PREFIX}_v${TARGET_VERSION}.sql"
|
||||
|
||||
# 5. Extraer el esquema FINAL
|
||||
echo "~> Generando $OUTPUT_FILE ---"
|
||||
pg_dump -d $DB_NAME -s --no-owner --no-privileges >$OUTPUT_FILE
|
||||
|
||||
echo "o> Esquema guardado en $OUTPUT_FILE"
|
||||
@@ -1,150 +0,0 @@
|
||||
-- eliminar los drop para prod
|
||||
drop domain if exists imei_type cascade;
|
||||
CREATE DOMAIN imei_type as varchar(15);
|
||||
drop domain if exists iccid_type cascade;
|
||||
CREATE DOMAIN iccid_type as varchar(22);
|
||||
drop domain if exists imsi_type cascade;
|
||||
CREATE DOMAIN imsi_type as varchar(15);
|
||||
|
||||
|
||||
CREATE table if not exists sim_cards (
|
||||
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
imei imei_type,
|
||||
iccid iccid_type,
|
||||
imsi imsi_type,
|
||||
user_id BIGINT,
|
||||
subscription_id BIGINT,
|
||||
created_at TIMESTAMP,
|
||||
last_update TIMESTAMP,
|
||||
deleted_at TIMESTAMP
|
||||
);
|
||||
|
||||
|
||||
CREATE TABLE if not exists sim_envio (
|
||||
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
codigo_origen TEXT,
|
||||
codigo_distrito TEXT,
|
||||
pedido_id BIGINT,
|
||||
sim_id BIGINT,
|
||||
|
||||
fecha_envio TIMESTAMP,
|
||||
fecha_email TIMESTAMP,
|
||||
is_preactivado BOOLEAN,
|
||||
fecha_devolucion TIMESTAMP,
|
||||
created_at TIMESTAMP,
|
||||
|
||||
CONSTRAINT fk_sim_id
|
||||
FOREIGN KEY(sim_id) REFERENCES sim_cards(id)
|
||||
);
|
||||
|
||||
-- Mock, No es parte de SIMs
|
||||
CREATE TABLE if not exists sf_subscription (
|
||||
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY
|
||||
);
|
||||
|
||||
-- No habria que meterle las propiedades del tipo de subscripcion
|
||||
CREATE TABLE if not exists sim_subscription_types (
|
||||
id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
subscription TEXT NOT NULL,
|
||||
created_at TIMESTAMP,
|
||||
updated_at TIMESTAMP,
|
||||
deleted_at TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE if not exists sim_company (
|
||||
id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
name TEXT,
|
||||
created_at TIMESTAMP,
|
||||
updated_at TIMESTAMP,
|
||||
deleted_at TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE sim_subscription (
|
||||
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
company_id INT,
|
||||
subscription_type_id INT,
|
||||
sim_id BIGINT,
|
||||
order_id BIGINT,
|
||||
|
||||
created_at TIMESTAMP,
|
||||
updated_at TIMESTAMP,
|
||||
deleted_at TIMESTAMP,
|
||||
|
||||
CONSTRAINT fk_sim_id
|
||||
FOREIGN KEY(sim_id) REFERENCES sim_cards(id),
|
||||
|
||||
CONSTRAINT fk_company_id
|
||||
FOREIGN KEY(company_id) REFERENCES sim_company(id),
|
||||
|
||||
CONSTRAINT fk_subscription_type_id
|
||||
FOREIGN KEY(subscription_type_id) REFERENCES sim_subscription_types(id)
|
||||
);
|
||||
|
||||
CREATE TABLE if not exists sim_subscription_operations (
|
||||
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
sim_id BIGINT,
|
||||
operation_type TEXT NOT NULL,
|
||||
happened_at TIMESTAMP,
|
||||
|
||||
CONSTRAINT valid_operations CHECK (
|
||||
operation_type in ('free','preactivate','activate','pause','cancel')
|
||||
),
|
||||
|
||||
CONSTRAINT fk_subscription_id
|
||||
FOREIGN KEY(sim_id)
|
||||
REFERENCES sim_subscription(id)
|
||||
);
|
||||
|
||||
-- Se supone que indica un cambio
|
||||
CREATE TABLE sim_subscription_historic (
|
||||
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
subscription_id BIGINT,
|
||||
iccid iccid_type,
|
||||
company_id INT
|
||||
);
|
||||
|
||||
CREATE TYPE status_enum AS ENUM ('noRequestID','noMassID','running','finished','error','other');
|
||||
|
||||
-- Tabla para gestionar las peticiones de cambio de objenious.
|
||||
-- Para una o mas lineas se pueden lanzar operacione que no sabemos
|
||||
-- con certeza cuando van a terminar.
|
||||
CREATE TABLE if not exists objenious_operation (
|
||||
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
retry_count INT DEFAULT 0,
|
||||
max_retry INT DEFAULT 5,
|
||||
max_date_retry TIMESTAMP DEFAULT NULL,
|
||||
iccids TEXT,
|
||||
request_id TEXT,
|
||||
mass_action_id TEXT,
|
||||
operation TEXT NOT NULL,
|
||||
start_date TIMESTAMP NOT NULL DEFAULT now(),
|
||||
last_change_date TIMESTAMP NOT NULL DEFAULT now(),
|
||||
end_date TIMESTAMP,
|
||||
error TEXT,
|
||||
status status_enum,
|
||||
objenious_status TEXT
|
||||
);
|
||||
|
||||
-- operaciones pendientes para revisar
|
||||
CREATE INDEX IF NOT EXISTS pending_operations
|
||||
ON objenious_operation(start_date)
|
||||
WHERE end_date IS NULL;
|
||||
|
||||
CREATE TABLE if not exists objenious_operation_change (
|
||||
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
operation_id BIGINT,
|
||||
creation_date TIMESTAMP NOT NULL DEFAULT now(),
|
||||
error TEXT,
|
||||
new_status status_enum,
|
||||
previous_status status_enum,
|
||||
new_objenious_status TEXT,
|
||||
previous_objenious_status TEXT,
|
||||
new_request_id TEXT,
|
||||
new_mass_action_id TEXT,
|
||||
|
||||
CONSTRAINT fk_operation_id
|
||||
FOREIGN KEY(operation_id) REFERENCES objenious_operation(id)
|
||||
);
|
||||
|
||||
CREATE INDEX operation_change
|
||||
ON objenious_operation_change(operation_id);
|
||||
48
deployment/database/migrations/0.1.0_objenious.sql
Normal file
48
deployment/database/migrations/0.1.0_objenious.sql
Normal file
@@ -0,0 +1,48 @@
|
||||
|
||||
CREATE TYPE status_enum AS ENUM ('noRequestID','noMassID','running','finished','error','other');
|
||||
|
||||
-- Tabla para gestionar las peticiones de cambio de objenious.
|
||||
-- Para una o mas lineas se pueden lanzar operacione que no sabemos
|
||||
-- con certeza cuando van a terminar.
|
||||
-- Estas tablas está fuertemente ligadas al sistema que usa la plataforma
|
||||
-- de objenioius y no debe unsarse para otra compañia.
|
||||
CREATE TABLE if not exists objenious_operation (
|
||||
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
retry_count INT DEFAULT 0, -- No implementado en codigo
|
||||
max_retry INT DEFAULT 5, -- No implementado en codigo
|
||||
max_date_retry TIMESTAMP DEFAULT NULL, -- No implementado en codigo
|
||||
iccids TEXT,
|
||||
request_id TEXT,
|
||||
mass_action_id TEXT,
|
||||
operation TEXT NOT NULL,
|
||||
start_date TIMESTAMP NOT NULL DEFAULT now(),
|
||||
last_change_date TIMESTAMP NOT NULL DEFAULT now(),
|
||||
end_date TIMESTAMP,
|
||||
error TEXT,
|
||||
status status_enum,
|
||||
objenious_status TEXT
|
||||
);
|
||||
|
||||
-- operaciones pendientes para revisar
|
||||
CREATE INDEX IF NOT EXISTS pending_operations
|
||||
ON objenious_operation(start_date)
|
||||
WHERE end_date IS NULL;
|
||||
|
||||
CREATE TABLE if not exists objenious_operation_change (
|
||||
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
operation_id BIGINT,
|
||||
creation_date TIMESTAMP NOT NULL DEFAULT now(),
|
||||
error TEXT,
|
||||
new_status status_enum,
|
||||
previous_status status_enum,
|
||||
new_objenious_status TEXT,
|
||||
previous_objenious_status TEXT,
|
||||
new_request_id TEXT,
|
||||
new_mass_action_id TEXT,
|
||||
|
||||
CONSTRAINT fk_operation_id
|
||||
FOREIGN KEY(operation_id) REFERENCES objenious_operation(id)
|
||||
);
|
||||
|
||||
CREATE INDEX operation_change
|
||||
ON objenious_operation_change(operation_id);
|
||||
67
deployment/database/migrations/1.0.0_orders.sql
Normal file
67
deployment/database/migrations/1.0.0_orders.sql
Normal file
@@ -0,0 +1,67 @@
|
||||
|
||||
-- Tablas para el seguimiento de las operaciones de SIM sin importar
|
||||
-- la cmpañia.
|
||||
DO $$ BEGIN
|
||||
CREATE TYPE order_types AS ENUM ('activate','preactivate','cancel','pause','reactivate','unknown');
|
||||
CREATE TYPE order_status AS ENUM (
|
||||
'pending', -- Mensaje creado/enviado a RabbitMQ
|
||||
'running', -- Consumidor ha cogido el mensaje (opcional)
|
||||
'finished', -- Procesado correctamente
|
||||
'failed', -- Falló, pero podría reintentarse (Pasar a delay?)
|
||||
'dlx' -- Falló definitivamente y está en Dead Letter Exchange
|
||||
);
|
||||
EXCEPTION
|
||||
WHEN duplicate_object THEN null;
|
||||
END $$;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS order_tracking (
|
||||
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
correlation_id VARCHAR(255) NOT NULL, -- ID compartido con RabbitMQ (message_id)
|
||||
exchange VARCHAR(100), -- Exchange al que se envia (de momento solo hay 1 principal sin contar delay y dlx)
|
||||
routing_key VARCHAR(100), -- Routing key del mensaje
|
||||
order_type order_types NOT NULL DEFAULT 'unknown',
|
||||
|
||||
payload JSONB, -- Duda si es optimo guardar la copia, es útil en caso de fallo
|
||||
|
||||
-- Campos de reintentos?
|
||||
|
||||
status order_status NOT NULL DEFAULT 'pending',
|
||||
retry_count INT DEFAULT 0,
|
||||
error_message TEXT, -- Razón del fallo
|
||||
error_stacktrace TEXT,
|
||||
|
||||
start_date TIMESTAMP NOT NULL DEFAULT (now() at time zone 'utc'),
|
||||
update_date TIMESTAMP NOT NULL DEFAULT (now() at time zone 'utc'),
|
||||
finish_date TIMESTAMP
|
||||
);
|
||||
|
||||
-- Busqueda según id de rabbit
|
||||
CREATE INDEX IF NOT EXISTS idx_order_correlation
|
||||
ON order_tracking(correlation_id);
|
||||
-- Ordenenes que todavia no han finalizado
|
||||
CREATE INDEX IF NOT EXISTS pending_orders
|
||||
ON order_tracking(start_date)
|
||||
WHERE order_tracking.finish_date IS NULL;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS order_history(
|
||||
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
order_id BIGINT NOT NULL,
|
||||
previous_status order_status NOT NULL, -- Siempre hay un estado anterior, para casos excepcioneale "unknown"
|
||||
new_status order_status NOT NULL,
|
||||
change_reason TEXT,
|
||||
change_date TIMESTAMP NOT NULL DEFAULT (now() at time zone 'utc'),
|
||||
|
||||
CONSTRAINT fk_order_id
|
||||
FOREIGN KEY(order_id)
|
||||
REFERENCES order_tracking(id)
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
|
||||
-- fk de order
|
||||
CREATE INDEX IF NOT EXISTS idx_order_id
|
||||
ON order_history(order_id);
|
||||
|
||||
-- busquedas por fecha
|
||||
CREATE INDEX IF NOT EXISTS idx_order_change_date
|
||||
ON order_history(change_date);
|
||||
|
||||
12
deployment/database/migrations/1.0.1_utc.sql
Normal file
12
deployment/database/migrations/1.0.1_utc.sql
Normal file
@@ -0,0 +1,12 @@
|
||||
/*
|
||||
* Fechas modificadas para que todas sean en base a 'UTC'
|
||||
* */
|
||||
ALTER TABLE objenious_operation
|
||||
ALTER COLUMN start_date SET DEFAULT (now() at time zone 'utc'),
|
||||
ALTER COLUMN last_change_date SET DEFAULT (now() at time zone 'utc');
|
||||
|
||||
ALTER TABLE objenious_operation_change
|
||||
ALTER COLUMN creation_date SET DEFAULT (now() at time zone 'utc');
|
||||
|
||||
|
||||
|
||||
30
deployment/database/migrations/1.0.2_timezones.sql
Normal file
30
deployment/database/migrations/1.0.2_timezones.sql
Normal file
@@ -0,0 +1,30 @@
|
||||
|
||||
/*
|
||||
* Fechas modificadas para que se puedan hacer query en base a la zona horaria objetivo
|
||||
* SELECT col_date at time zone 'cet' -- devuleve la fecha en esa zona
|
||||
* SELECT col_date -- devuleve la fecha en UTC con el offset de la zona horaria
|
||||
*
|
||||
* */
|
||||
|
||||
ALTER TABLE objenious_operation
|
||||
ALTER COLUMN start_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
|
||||
ALTER COLUMN start_date SET DEFAULT now(),
|
||||
ALTER COLUMN last_change_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
|
||||
ALTER COLUMN last_change_date SET DEFAULT now(),
|
||||
ALTER COLUMN end_date SET DATA TYPE TIMESTAMP WITH TIME ZONE;
|
||||
|
||||
ALTER TABLE objenious_operation_change
|
||||
ALTER COLUMN creation_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
|
||||
ALTER COLUMN creation_date SET DEFAULT now();
|
||||
|
||||
ALTER TABLE order_tracking
|
||||
ALTER COLUMN start_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
|
||||
ALTER COLUMN start_date SET DEFAULT now(),
|
||||
ALTER COLUMN update_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
|
||||
ALTER COLUMN update_date SET DEFAULT now(),
|
||||
ALTER COLUMN finish_date SET DATA TYPE TIMESTAMP WITH TIME ZONE;
|
||||
|
||||
ALTER TABLE order_history
|
||||
ALTER COLUMN change_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
|
||||
ALTER COLUMN change_date SET DEFAULT now();
|
||||
|
||||
10
deployment/database/migrations/1.1.0_webhook-order.sql
Normal file
10
deployment/database/migrations/1.1.0_webhook-order.sql
Normal file
@@ -0,0 +1,10 @@
|
||||
/**
|
||||
* A que endpoint actualizar el estado de los order, si se especificase.
|
||||
* Se asume que siempre se usa POST.
|
||||
* Se separa host de enpoint para dejar host como default el origen de la
|
||||
* peticion anterior y poder hacer filtrados
|
||||
*/
|
||||
|
||||
ALTER TABLE order_tracking
|
||||
ADD COLUMN webhook_host TEXT,
|
||||
ADD COLUMN webhook_endpoint TEXT;
|
||||
@@ -0,0 +1,7 @@
|
||||
/**
|
||||
* En la tabla de orders de objenious no hay forma de saber a a que mensaje está Solicitando
|
||||
* cada operación.
|
||||
*/
|
||||
|
||||
ALTER TABLE objenious_operation
|
||||
ADD COLUMN correlation_id TEXT;
|
||||
@@ -0,0 +1,32 @@
|
||||
/**
|
||||
* Para la tarea WEBINT-328-Pausas-cacelaciones.
|
||||
* Almacena las pausas/cancelaciones que no se han podido hacer porque la linea esta en
|
||||
* "Test"
|
||||
*/
|
||||
|
||||
|
||||
DO $$ BEGIN
|
||||
CREATE TYPE SUSPENDTERMINATE AS ENUM ('suspend','terminate');
|
||||
EXCEPTION
|
||||
WHEN duplicate_object THEN null;
|
||||
END $$;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS pause_cancel_tasks (
|
||||
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
iccid TEXT NOT NULL,
|
||||
operation_type SUSPENDTERMINATE,
|
||||
last_checked TIMESTAMPTZ, -- Última vez que se ha comprobado que no esté en test
|
||||
activation_date TIMESTAMPTZ, -- Fecha de activacion para comprobar si ha pasdo un mes
|
||||
next_check TIMESTAMPTZ, -- Si se ha comprobado se asignará la siguiente fecha de revision
|
||||
|
||||
completed_date TIMESTAMPTZ, -- Cuando se ha completado, para bien o mal.
|
||||
error TEXT,
|
||||
action_data JSONB -- datos de la operacion original.
|
||||
);
|
||||
|
||||
-- Indice de las tareas que no han terminado
|
||||
CREATE INDEX idx_pause_cancel_tasks_pending
|
||||
ON pause_cancel_tasks (next_check)
|
||||
WHERE completed_date IS NULL;
|
||||
|
||||
|
||||
@@ -1,23 +1,25 @@
|
||||
# --- Release image ---
|
||||
FROM node:22-alpine AS release
|
||||
RUN apk --no-cache add git
|
||||
WORKDIR /home/node/app
|
||||
|
||||
RUN corepack enable
|
||||
|
||||
COPY ./dist/packages ./packages
|
||||
COPY ./.yarnrc.yml ./
|
||||
COPY ./docs ./docs
|
||||
# Para las migraciones
|
||||
COPY ./deployment ./deployment
|
||||
|
||||
COPY ./package.json ./
|
||||
RUN node -e "\
|
||||
const p = require('./package.json'); \
|
||||
delete p.workspaces; \
|
||||
delete p.scripts; \
|
||||
delete p.devDependencies; \
|
||||
require('fs').writeFileSync('./package.json', JSON.stringify(p, null, 2));"
|
||||
RUN npm install --omit=dev
|
||||
|
||||
RUN mkdir -p packages/node_modules && \
|
||||
ln -sf ../sim-shared packages/node_modules/sim-shared
|
||||
RUN yarn install
|
||||
|
||||
RUN mkdir -p dist && ln -sf ../packages dist/packages
|
||||
|
||||
COPY ./entrypoint.sh ./
|
||||
RUN chmod +x entrypoint.sh
|
||||
|
||||
EXPOSE ${PORT:-3000}
|
||||
|
||||
ENTRYPOINT ["./entrypoint.sh"]
|
||||
|
||||
@@ -23,14 +23,18 @@ services:
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
environment:
|
||||
RABBITMQ_DEFAULT_USER: ${RABBITMQ_USER}
|
||||
RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASSWORD}
|
||||
RABBITMQ_USER: ${RABBITMQ_USER}
|
||||
RABBITMQ_PASSWORD: ${RABBITMQ_PASSWORD}
|
||||
entrypoint: ["bash", "/usr/local/bin/docker-entrypoint-wrapper.sh"]
|
||||
command: ["rabbitmq-server"]
|
||||
volumes:
|
||||
- ./rabbit/docker-entrypoint-wrapper.sh:/usr/local/bin/docker-entrypoint-wrapper.sh:ro
|
||||
- ./rabbitmq_plugins/enabled_plugins:/etc/rabbitmq/enabled_plugins:ro
|
||||
- ./rabbit/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf:ro
|
||||
- ./rabbit/definitions.json:/etc/rabbitmq/definitions.json:ro
|
||||
- ./rabbit/definitions.json:/etc/rabbitmq/definitions.template.json:ro
|
||||
networks:
|
||||
- internal
|
||||
- proxy
|
||||
labels:
|
||||
- "io.portainer.accesscontrol.teams=develop"
|
||||
- "traefik.enable=true"
|
||||
@@ -60,8 +64,6 @@ services:
|
||||
depends_on:
|
||||
rabbitmq-sim-broker:
|
||||
condition: service_healthy
|
||||
postgresql-sim:
|
||||
condition: service_healthy
|
||||
networks:
|
||||
- savefamily
|
||||
- proxy
|
||||
@@ -72,6 +74,8 @@ services:
|
||||
- ./.env:/home/node/app/.env:ro
|
||||
- ./sim-consumidor-objenious.env:/home/node/app/packages/sim-consumidor-objenious/.env:ro
|
||||
- ./sim-objenious-cron.env:/home/node/app/packages/sim-objenious-cron/.env:ro
|
||||
- ./obj.pem:/home/node/app/packages/sim-consumidor-objenious/obj.pem:ro
|
||||
- ./obj.pem:/home/node/app/packages/sim-objenious-cron/obj.pem:ro
|
||||
healthcheck:
|
||||
test:
|
||||
[
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
#!/bin/sh
|
||||
cd /home
|
||||
|
||||
cd /home/node/app && yarn start
|
||||
# (cd /home/node/app/packages/sim-objenious-cron && node index.js) &
|
||||
# (cd /home/node/app/packages/sim-consumidor-objenious && node index.js) &
|
||||
# cd /home/node/app/packages/sim-entrada-eventos && exec node index.js
|
||||
cd /home/node/app
|
||||
yarn migrate
|
||||
yarn start
|
||||
|
||||
@@ -50,6 +50,10 @@ pipeline {
|
||||
cleanRemote: false,
|
||||
execCommand: "ln -sf $BASE_REMOTE_PATH/vault/savefamily/sf-sims/sim-objenious-cron.env $APP_REMOTE_PATH/sim-objenious-cron.env"
|
||||
),
|
||||
sshTransfer(
|
||||
cleanRemote: false,
|
||||
execCommand: "ln -sf $BASE_REMOTE_PATH/vault/savefamily/sf-sims/obj.pem $APP_REMOTE_PATH/obj.pem"
|
||||
),
|
||||
sshTransfer(
|
||||
cleanRemote: false,
|
||||
remoteDirectory: "$APP_REMOTE_PATH",
|
||||
@@ -59,14 +63,18 @@ pipeline {
|
||||
sshTransfer(
|
||||
cleanRemote: false,
|
||||
remoteDirectory: "$APP_REMOTE_PATH",
|
||||
sourceFiles: "deployment/database/**/*",
|
||||
removePrefix: "deployment",
|
||||
sourceFiles: "docs/**/*",
|
||||
),
|
||||
sshTransfer(
|
||||
cleanRemote: false,
|
||||
remoteDirectory: "$APP_REMOTE_PATH",
|
||||
sourceFiles: "deployment/rabbit/**/*",
|
||||
removePrefix: "deployment",
|
||||
sourceFiles: "deployment/database/**/*",
|
||||
),
|
||||
sshTransfer(
|
||||
cleanRemote: false,
|
||||
remoteDirectory: "$APP_REMOTE_PATH",
|
||||
sourceFiles: "deployment/develop/rabbit/**/*",
|
||||
removePrefix: "deployment/develop",
|
||||
),
|
||||
sshTransfer(
|
||||
cleanRemote: false,
|
||||
@@ -84,6 +92,11 @@ pipeline {
|
||||
remoteDirectory: "$APP_REMOTE_PATH",
|
||||
sourceFiles: "package.json",
|
||||
),
|
||||
sshTransfer(
|
||||
cleanRemote: false,
|
||||
remoteDirectory: "$APP_REMOTE_PATH",
|
||||
sourceFiles: ".yarnrc.yml",
|
||||
),
|
||||
sshTransfer(
|
||||
cleanRemote: false,
|
||||
execCommand: "sh $APP_REMOTE_PATH/rebuild.sh"
|
||||
|
||||
90
deployment/develop/rabbit/definitions.json
Normal file
90
deployment/develop/rabbit/definitions.json
Normal file
@@ -0,0 +1,90 @@
|
||||
{
|
||||
"rabbit_version": "4.2.2",
|
||||
"rabbitmq_version": "4.2.2",
|
||||
"product_name": "RabbitMQ",
|
||||
"product_version": "4.2.2",
|
||||
"users": [
|
||||
{
|
||||
"name": "RABBITMQ_USER_PLACEHOLDER",
|
||||
"password": "RABBITMQ_PASSWORD_PLACEHOLDER",
|
||||
"tags": ["administrator"]
|
||||
}
|
||||
],
|
||||
"vhosts": [
|
||||
{
|
||||
"name": "sim-vhost"
|
||||
}
|
||||
],
|
||||
"permissions": [
|
||||
{
|
||||
"user": "RABBITMQ_USER_PLACEHOLDER",
|
||||
"vhost": "sim-vhost",
|
||||
"configure": ".*",
|
||||
"write": ".*",
|
||||
"read": ".*"
|
||||
}
|
||||
],
|
||||
"topic_permissions": [],
|
||||
"parameters": [],
|
||||
"global_parameters": [
|
||||
{
|
||||
"name": "cluster_name",
|
||||
"value": "rabbit@a8d5c6e08439"
|
||||
},
|
||||
{
|
||||
"name": "internal_cluster_id",
|
||||
"value": "rabbitmq-cluster-id-gXeBLbsUC2W2tU0Bx_QY_w"
|
||||
}
|
||||
],
|
||||
"policies": [
|
||||
{
|
||||
"vhost": "sim-vhost",
|
||||
"name": "pol.sim.dlx",
|
||||
"pattern": "sim.*",
|
||||
"apply-to": "queues",
|
||||
"definition": {
|
||||
"dead-letter-exchange": "sim.dlx"
|
||||
},
|
||||
"priority": 7
|
||||
}
|
||||
],
|
||||
"exchanges": [
|
||||
{
|
||||
"name": "sim.exchange",
|
||||
"vhost": "sim-vhost",
|
||||
"type": "topic",
|
||||
"durable": true,
|
||||
"auto_delete": false,
|
||||
"internal": false,
|
||||
"argurments": {}
|
||||
},
|
||||
{
|
||||
"name": "sim.dlx",
|
||||
"vhost": "sim-vhost",
|
||||
"type": "topic",
|
||||
"durable": true,
|
||||
"auto_delete": false,
|
||||
"internal": false,
|
||||
"argurments": {}
|
||||
}
|
||||
],
|
||||
"queues": [
|
||||
{
|
||||
"name": "sim.logs",
|
||||
"vhost": "sim-vhost",
|
||||
"durable": true,
|
||||
"auto_delete": false,
|
||||
"arguments": {}
|
||||
}
|
||||
],
|
||||
"bindings": [
|
||||
{
|
||||
"source": "sim.exchange",
|
||||
"vhost": "sim-vhost",
|
||||
"destination": "sim.logs",
|
||||
"destination_type": "queue",
|
||||
"routing_key": "sim.#",
|
||||
"arguments": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
12
deployment/develop/rabbit/docker-entrypoint-wrapper.sh
Executable file
12
deployment/develop/rabbit/docker-entrypoint-wrapper.sh
Executable file
@@ -0,0 +1,12 @@
|
||||
#!/bin/bash
|
||||
set -eu
|
||||
|
||||
# Substitute env vars into definitions template before RabbitMQ starts.
|
||||
# RabbitMQ 4.x skips default user creation when definitions.json is loaded,
|
||||
# so the user must be defined in the JSON itself.
|
||||
sed \
|
||||
-e "s|RABBITMQ_USER_PLACEHOLDER|${RABBITMQ_USER}|g" \
|
||||
-e "s|RABBITMQ_PASSWORD_PLACEHOLDER|${RABBITMQ_PASSWORD}|g" \
|
||||
/etc/rabbitmq/definitions.template.json > /etc/rabbitmq/definitions.json
|
||||
|
||||
exec docker-entrypoint.sh "$@"
|
||||
4
deployment/develop/rabbit/rabbitmq.conf
Normal file
4
deployment/develop/rabbit/rabbitmq.conf
Normal file
@@ -0,0 +1,4 @@
|
||||
management.load_definitions = /etc/rabbitmq/definitions.json
|
||||
|
||||
default_vhost = sim-vhost
|
||||
default_queue_type = quorum
|
||||
28
deployment/local/docker/Dockerfile.dev
Normal file
28
deployment/local/docker/Dockerfile.dev
Normal file
@@ -0,0 +1,28 @@
|
||||
# Stage base para coordinar las fases de build y ejecucion
|
||||
FROM node:22-alpine AS base
|
||||
# Hace falta para la herramienta de migraciones, cuando se publique se
|
||||
# sustituira por el paquete de npm
|
||||
RUN apk --no-cache add git
|
||||
WORKDIR /usr/local/app
|
||||
RUN corepack enable && \
|
||||
corepack prepare yarn@4.12.0 --activate
|
||||
|
||||
COPY ./package.json ./yarn.lock ./
|
||||
COPY ./packages ./packages
|
||||
|
||||
# copia el codigo en general
|
||||
COPY tsconfig*.json ./
|
||||
COPY .env* ./
|
||||
COPY ./.yarnrc.yml ./
|
||||
COPY ./docs ./docs
|
||||
COPY ./deployment/local/docker/start.sh ./
|
||||
# Copiar el archivo de migrations? porque ahora no creo que se esté lanzando nada
|
||||
COPY ./deployment/database/migrations ./deployment/database/migrations
|
||||
RUN yarn install && \
|
||||
yarn cache clean && \
|
||||
yarn build && \
|
||||
chmod +x start.sh
|
||||
EXPOSE ${PORT}
|
||||
ENTRYPOINT [ "./start.sh" ]
|
||||
|
||||
|
||||
@@ -24,14 +24,15 @@ services:
|
||||
RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASSWORD}
|
||||
volumes:
|
||||
- ./rabbitmq_plugins/enabled_plugins:/etc/rabbitmq/enabled_plugins:ro
|
||||
- ./deployment/rabbit/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf:ro
|
||||
- ./deployment/rabbit/definitions.json:/etc/rabbitmq/definitions.json:ro
|
||||
- ./deployment/local/rabbit/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf:ro
|
||||
- ./deployment/local/rabbit/definitions.json:/etc/rabbitmq/definitions.json:ro
|
||||
|
||||
sim-gateway:
|
||||
container_name: sim-gateway
|
||||
sf-sims-api:
|
||||
container_name: sf-sims-api
|
||||
image: sf-sims-api
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: deployment/Dockerfile.dev
|
||||
dockerfile: deployment/local/docker/Dockerfile.dev
|
||||
args:
|
||||
PORT: "${PORT:-3000}"
|
||||
develop:
|
||||
@@ -39,6 +40,9 @@ services:
|
||||
- path: ./packages
|
||||
action: sync
|
||||
target: /usr/local/app/packages
|
||||
- path: ./docs
|
||||
action: sync
|
||||
target: /usr/local/app/docs
|
||||
- path: ./package.json
|
||||
action: rebuild
|
||||
ports:
|
||||
@@ -46,19 +50,31 @@ services:
|
||||
env_file:
|
||||
- .env
|
||||
restart: unless-stopped
|
||||
healthcheck:
|
||||
test:
|
||||
[
|
||||
"CMD-SHELL",
|
||||
'node -e "fetch(''http://localhost:'' + (process.env.PORT || 3000) + ''/health'').then(r => { if (!r.ok) process.exit(1) }).catch(() => process.exit(1))"',
|
||||
]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
start_period: 15s
|
||||
depends_on:
|
||||
rabbitmq-sim-broker:
|
||||
condition: service_healthy
|
||||
postgresql-sim:
|
||||
condition: service_healthy
|
||||
|
||||
postgresql-sim:
|
||||
container_name: postgresql-sim
|
||||
image: postgres:16.1
|
||||
env_file:
|
||||
- .env
|
||||
ports:
|
||||
- "5432:${DEV_POSTGRES_PORT}"
|
||||
- "${POSTGRES_PORT}:${POSTGRES_PORT}"
|
||||
volumes:
|
||||
- ./sql-data/:/var/lib/postgres/data
|
||||
- ./deployment/database/init.sql:/docker-entrypoint-initdb.d/init.sql
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"]
|
||||
interval: 5s
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd /mnt/docker-storage/containers/savefamily/sf-shopify-orders
|
||||
# cd /mnt/docker-storage/containers/savefamily/sf-shopify-orders
|
||||
cd /mnt/docker-storage/containers/savefamily/sf-sims-api
|
||||
|
||||
docker stop sf-shopify-orders-api || true
|
||||
docker rm sf-shopify-orders-api || true
|
||||
docker rmi sf-shopify-orders-api || true
|
||||
docker stop sf-sims-api || true
|
||||
docker rm sf-sims-api || true
|
||||
docker rmi sf-sims-api || true
|
||||
|
||||
docker compose -f docker-compose.yaml up --build -d
|
||||
|
||||
3
deployment/local/docker/start.sh
Normal file
3
deployment/local/docker/start.sh
Normal file
@@ -0,0 +1,3 @@
|
||||
#!/bin/sh
|
||||
echo "Lanzando migraciones e iniciando servidor"
|
||||
yarn migrate && yarn start
|
||||
@@ -1,6 +1,3 @@
|
||||
default_user = guest
|
||||
default_pass = guest
|
||||
|
||||
listeners.tcp.default = 5672
|
||||
management.tcp.port = 15672
|
||||
|
||||
25
docs/sim-api-documentation.html
Normal file
25
docs/sim-api-documentation.html
Normal file
File diff suppressed because one or more lines are too long
@@ -11,7 +11,7 @@ post {
|
||||
}
|
||||
|
||||
body:form-urlencoded {
|
||||
iccid: 8933201125065160406
|
||||
iccid: 8933201125065160380
|
||||
offer: SAVEFAMILY1
|
||||
}
|
||||
|
||||
|
||||
16
docs/sim-api/Activation Email Health.bru
Normal file
16
docs/sim-api/Activation Email Health.bru
Normal file
@@ -0,0 +1,16 @@
|
||||
meta {
|
||||
name: Activation Email Health
|
||||
type: http
|
||||
seq: 8
|
||||
}
|
||||
|
||||
post {
|
||||
url: https://sf-sim-activation.savefamily.net/health
|
||||
body: none
|
||||
auth: inherit
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
42
docs/sim-api/Activation Email.bru
Normal file
42
docs/sim-api/Activation Email.bru
Normal file
@@ -0,0 +1,42 @@
|
||||
meta {
|
||||
name: Activation Email
|
||||
type: http
|
||||
seq: 6
|
||||
}
|
||||
|
||||
post {
|
||||
url: https://sf-sim-activation.savefamily.net/send-activation-mail
|
||||
body: json
|
||||
auth: inherit
|
||||
}
|
||||
|
||||
headers {
|
||||
x-apikey-sim-activation: 9e48c4ac-1ab0-4397-b3f3-6c239200dfe6
|
||||
}
|
||||
|
||||
body:json {
|
||||
{
|
||||
"id": "11",
|
||||
"retry_count": 0,
|
||||
"max_retry": null,
|
||||
"max_date_retry": null,
|
||||
"iccids": [
|
||||
"8933201125068886080"
|
||||
],
|
||||
"request_id": "14362",
|
||||
"mass_action_id": "5208468",
|
||||
"operation": "activate",
|
||||
"start_date": "2026-02-13T11:08:42.499Z",
|
||||
"last_change_date": "2026-02-16T09:24:36.073Z",
|
||||
"end_date": "2026-02-16T09:24:36.073Z",
|
||||
"error": null,
|
||||
"status": "finished",
|
||||
"objenious_status": "Terminé",
|
||||
"msisdn": "33764399870"
|
||||
}
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
@@ -11,10 +11,45 @@ post {
|
||||
}
|
||||
|
||||
body:form-urlencoded {
|
||||
iccid: 8933201124059176320
|
||||
iccid: 8933201125068890074
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
|
||||
docs {
|
||||
El endpoint recibe como body
|
||||
```
|
||||
{
|
||||
iccid: string,
|
||||
update_webhook?: string
|
||||
}
|
||||
```
|
||||
|
||||
`update_webhook` está en desarrollo, pero será donde se mande la actualizacion de la cancelación cuando haya una respuesta de la API externa.
|
||||
|
||||
Si la llamada tiene exito devuelve:
|
||||
``` json
|
||||
{
|
||||
data: {
|
||||
iccid: string,
|
||||
message_id: string,
|
||||
operation: "cancelation"
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
message_id se usará para la llamada /orders/message_id/}{message_id}
|
||||
|
||||
Si la llamada falla devolvera:
|
||||
```json
|
||||
{
|
||||
errors: {
|
||||
msg: string
|
||||
... (campos extra de gestion del error)
|
||||
}
|
||||
}
|
||||
```
|
||||
}
|
||||
|
||||
16
docs/sim-api/Docs.bru
Normal file
16
docs/sim-api/Docs.bru
Normal file
@@ -0,0 +1,16 @@
|
||||
meta {
|
||||
name: Docs
|
||||
type: http
|
||||
seq: 12
|
||||
}
|
||||
|
||||
get {
|
||||
url: {{baseurl}}/docs/sim-api-documentation.html
|
||||
body: none
|
||||
auth: inherit
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
16
docs/sim-api/Get pending orders.bru
Normal file
16
docs/sim-api/Get pending orders.bru
Normal file
@@ -0,0 +1,16 @@
|
||||
meta {
|
||||
name: Get pending orders
|
||||
type: http
|
||||
seq: 11
|
||||
}
|
||||
|
||||
get {
|
||||
url: {{baseurl}}/orders/pending
|
||||
body: none
|
||||
auth: inherit
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
16
docs/sim-api/Order by id.bru
Normal file
16
docs/sim-api/Order by id.bru
Normal file
@@ -0,0 +1,16 @@
|
||||
meta {
|
||||
name: Order by id
|
||||
type: http
|
||||
seq: 9
|
||||
}
|
||||
|
||||
get {
|
||||
url: {{baseurl}}/orders/
|
||||
body: none
|
||||
auth: inherit
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
20
docs/sim-api/Orders by message_id.bru
Normal file
20
docs/sim-api/Orders by message_id.bru
Normal file
@@ -0,0 +1,20 @@
|
||||
meta {
|
||||
name: Orders by message_id
|
||||
type: http
|
||||
seq: 12
|
||||
}
|
||||
|
||||
get {
|
||||
url: {{baseurl}}/orders/message_id/019c93d3-014a-711d-b958-03dd629be78d
|
||||
body: none
|
||||
auth: inherit
|
||||
}
|
||||
|
||||
params:query {
|
||||
~message_id: 019c93d3-014a-711d-b958-03dd629be78d
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
@@ -15,7 +15,7 @@ params:query {
|
||||
}
|
||||
|
||||
body:form-urlencoded {
|
||||
iccid: 8933201125065160414
|
||||
iccid: 8933201125068886700
|
||||
}
|
||||
|
||||
settings {
|
||||
|
||||
21
docs/sim-api/ReActivate.bru
Normal file
21
docs/sim-api/ReActivate.bru
Normal file
@@ -0,0 +1,21 @@
|
||||
meta {
|
||||
name: ReActivate
|
||||
type: http
|
||||
seq: 13
|
||||
}
|
||||
|
||||
post {
|
||||
url: {{baseurl}}/sim/reActivate
|
||||
body: formUrlEncoded
|
||||
auth: inherit
|
||||
}
|
||||
|
||||
body:form-urlencoded {
|
||||
iccid: 8933201125065160380
|
||||
~offer: SAVEFAMILY1
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
21
docs/sim-api/Test Order.bru
Normal file
21
docs/sim-api/Test Order.bru
Normal file
@@ -0,0 +1,21 @@
|
||||
meta {
|
||||
name: Test Order
|
||||
type: http
|
||||
seq: 9
|
||||
}
|
||||
|
||||
post {
|
||||
url: {{baseurl}}/sim/test
|
||||
body: formUrlEncoded
|
||||
auth: inherit
|
||||
}
|
||||
|
||||
body:form-urlencoded {
|
||||
iccid: 8933201125065160999
|
||||
offer: SAVEFAMILY1
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
34
docs/sim-api/collection.bru
Normal file
34
docs/sim-api/collection.bru
Normal file
@@ -0,0 +1,34 @@
|
||||
docs {
|
||||
Los endpoint tienen unos campos comunes de entrada:
|
||||
```ts
|
||||
{
|
||||
iccid: string,
|
||||
update_webhook?: string
|
||||
}
|
||||
```
|
||||
|
||||
`update_webhook` está en desarrollo, pero será donde se mande la actualizacion de la cancelación cuando haya una respuesta de la API externa.
|
||||
|
||||
Si la llamada tiene exito devuelve:
|
||||
```ts
|
||||
{
|
||||
data: {
|
||||
iccid: string,
|
||||
message_id: string,
|
||||
operation: string,
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
message_id se usará para la llamada /orders/message_id/}{message_id}
|
||||
|
||||
Si la llamada falla devolvera:
|
||||
```ts
|
||||
{
|
||||
errors: {
|
||||
msg: string
|
||||
... (campos extra de gestion del error)
|
||||
}
|
||||
}
|
||||
```
|
||||
}
|
||||
@@ -1,3 +1,4 @@
|
||||
vars {
|
||||
baseurl: http://localhost:3000
|
||||
}
|
||||
color: #2E8A54
|
||||
|
||||
4
docs/sim-api/environments/prod.bru
Normal file
4
docs/sim-api/environments/prod.bru
Normal file
@@ -0,0 +1,4 @@
|
||||
vars {
|
||||
baseurl: https://sf-sims.savefamilygps.net
|
||||
}
|
||||
color: #CE4F3B
|
||||
4
docs/sim-api/environments/simconnections.bru
Normal file
4
docs/sim-api/environments/simconnections.bru
Normal file
@@ -0,0 +1,4 @@
|
||||
vars {
|
||||
baseurl: http://sim-connections.savefamilygps.net
|
||||
}
|
||||
color: #C77A0F
|
||||
20
docs/sim-api/test proxy.bru
Normal file
20
docs/sim-api/test proxy.bru
Normal file
@@ -0,0 +1,20 @@
|
||||
meta {
|
||||
name: test proxy
|
||||
type: http
|
||||
seq: 14
|
||||
}
|
||||
|
||||
get {
|
||||
url: {{baseurl}}/simconnections/alai/select?iccid=1111111111111111111
|
||||
body: none
|
||||
auth: inherit
|
||||
}
|
||||
|
||||
params:query {
|
||||
iccid: 1111111111111111111
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
38
docs/sim-objenious/Alarmas disponibles.bru
Normal file
38
docs/sim-objenious/Alarmas disponibles.bru
Normal file
@@ -0,0 +1,38 @@
|
||||
meta {
|
||||
name: Alarmas disponibles
|
||||
type: http
|
||||
seq: 20
|
||||
}
|
||||
|
||||
get {
|
||||
url: https://api-getway.objenious.com/ws/alarms
|
||||
body: formUrlEncoded
|
||||
auth: bearer
|
||||
}
|
||||
|
||||
auth:bearer {
|
||||
token: {{ws-access-token-partenaire}}
|
||||
}
|
||||
|
||||
body:json {
|
||||
{
|
||||
"identifier": {
|
||||
"identifiers": ["8933201124059175967"],
|
||||
"identifierType": "ICCID"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
body:form-urlencoded {
|
||||
~identifier.identifierType: "ICCID"
|
||||
~identifier.identifiers: ["8933201124059175967"]
|
||||
}
|
||||
|
||||
vars:pre-request {
|
||||
~id: 5187320
|
||||
}
|
||||
|
||||
settings {
|
||||
encodeUrl: true
|
||||
timeout: 0
|
||||
}
|
||||
@@ -5,16 +5,16 @@ meta {
|
||||
}
|
||||
|
||||
get {
|
||||
url: https://api-getway.objenious.com/ws/lines?pageSize=10&identifier.identifierType=ICCID&identifier.identifiers=8933201125065160455
|
||||
url: https://api-getway.objenious.com/ws/lines?pageSize=1000&simStatus=ACTIVATED
|
||||
body: formUrlEncoded
|
||||
auth: bearer
|
||||
}
|
||||
|
||||
params:query {
|
||||
pageSize: 10
|
||||
identifier.identifierType: ICCID
|
||||
identifier.identifiers: 8933201125065160455
|
||||
~simStatus: ACTIVATED
|
||||
pageSize: 1000
|
||||
simStatus: ACTIVATED
|
||||
~identifier.identifierType: ICCID
|
||||
~identifier.identifiers: 8933201125065160455
|
||||
}
|
||||
|
||||
auth:bearer {
|
||||
|
||||
@@ -37,7 +37,7 @@ body:form-urlencoded {
|
||||
}
|
||||
|
||||
vars:pre-request {
|
||||
params.id: 14111
|
||||
params.id: 15102
|
||||
}
|
||||
|
||||
settings {
|
||||
|
||||
@@ -5,13 +5,13 @@ meta {
|
||||
}
|
||||
|
||||
get {
|
||||
url: {{actionsUrl}}/massActions?massActionId=5192767
|
||||
url: {{actionsUrl}}/massActions?massActionId=5363116
|
||||
body: formUrlEncoded
|
||||
auth: bearer
|
||||
}
|
||||
|
||||
params:query {
|
||||
massActionId: 5192767
|
||||
massActionId: 5363116
|
||||
~identifier.identifierType: ICCID
|
||||
~identifier.identifiers: 8933201125065160463,8933201125065160422
|
||||
}
|
||||
|
||||
1843
package-lock.json
generated
1843
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,6 @@
|
||||
{
|
||||
"name": "sim-eventos",
|
||||
"version": "1.0.0",
|
||||
"packageManager": "yarn@4.12.0",
|
||||
"workspaces": [
|
||||
"packages/*"
|
||||
@@ -14,9 +15,11 @@
|
||||
"lint": "eslint .",
|
||||
"lint:fix": "eslint --fix .",
|
||||
"format": "prettier --write .",
|
||||
"format:check": "prettier --check ."
|
||||
"format:check": "prettier --check .",
|
||||
"migrate": "yarn db-migrate -e .env -m deployment/database/migrations -t 99.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@sf-alvar/db-migrate": "1.0.6",
|
||||
"@tsconfig/node22": "^22.0.5",
|
||||
"amqp-connection-manager": "^5.0.0",
|
||||
"amqplib": "^0.10.9",
|
||||
@@ -26,6 +29,7 @@
|
||||
"express": "^5.2.1",
|
||||
"pg": "^8.18.0",
|
||||
"typescript": "^5.9.3",
|
||||
"uuidv7": "^1.1.0",
|
||||
"vite": "^7.3.1",
|
||||
"vite-tsconfig-paths": "^6.0.5"
|
||||
},
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
console.log("Template")
|
||||
console.log(new Date().toISOString())
|
||||
|
||||
export default {}
|
||||
|
||||
@@ -1,22 +1,3 @@
|
||||
PORT=3000
|
||||
RABBITMQ_USER=guest
|
||||
RABBITMQ_PASSWORD=guest
|
||||
NOS_BASE_URL=localhost
|
||||
|
||||
ENVIORMENT=development
|
||||
|
||||
RABBITMQ_HOST=rabbitmq-sim-broker
|
||||
#RABBITMQ_HOST=localhost
|
||||
RABBITMQ_PORT=5672
|
||||
RABBITMQ_USER=guest
|
||||
RABBITMQ_PASSWORD=guest
|
||||
RABBITMQ_SECURE=false
|
||||
RABBITMQ_VHOST=sim-vhost
|
||||
|
||||
# Hay cosas que unificar de varios servicios
|
||||
POSTGRES_DB=postgres
|
||||
POSTGRES_DATABASE=postres
|
||||
POSTGRES_HOST=postgresql-sim-1
|
||||
POSTGRES_PORT=5432
|
||||
DEV_POSTGRES_PORT=5432
|
||||
POSTGRES_USER=postgres
|
||||
POSTGRES_PASSWORD=1234
|
||||
|
||||
20
packages/sim-consumidor-nos/config/env/index.ts
vendored
20
packages/sim-consumidor-nos/config/env/index.ts
vendored
@@ -1,5 +1,16 @@
|
||||
import { loadEnvFile } from "node:process";
|
||||
loadEnvFile("../../.env")
|
||||
import path from "node:path";
|
||||
|
||||
try {
|
||||
loadEnvFile(path.join("./.env")) // base
|
||||
} catch (e) {
|
||||
console.error("Error cargando el .env desde ./.env")
|
||||
}
|
||||
try {
|
||||
loadEnvFile(path.join("../../.env")) // Global
|
||||
} catch (e) {
|
||||
console.error("Error cargando el .env desde ../../.env")
|
||||
}
|
||||
|
||||
export const env = {
|
||||
ENVIRONMENT: process.env.ENVIORMENT,
|
||||
@@ -9,8 +20,8 @@ export const env = {
|
||||
POSTGRES_HOST: process.env.POSTGRES_HOST,
|
||||
POSTGRES_DATABASE: process.env.POSTGRES_DATABASE,
|
||||
RABBITMQ_HOST: String(process.env.RABBITMQ_HOST ?? "localhost"),
|
||||
RABBITMQ_USER: String(process.env.RABBITMQ_USER ?? "guest"),
|
||||
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD ?? "guest"),
|
||||
RABBITMQ_USER: String(process.env.RABBITMQ_USER ?? "test"),
|
||||
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD ?? "test"),
|
||||
RABBITMQ_EXCHANGE: String(process.env.RABBITMQ_EXCHANGE ?? "/"),
|
||||
RABBITMQ_PORT: parseInt(process.env.RABBITMQ_PORT ?? "5672"),
|
||||
RABBITMQ_MODULENAME: process.env.MODULENAME,
|
||||
@@ -18,5 +29,8 @@ export const env = {
|
||||
RABBITMQ_SECURE: process.env.RABBITMQ_SECURE,
|
||||
RABBITMQ_RETRY_INTERVAL: process.env.RABBITMQ_INTERVAL,
|
||||
RABBITMQ_VHOST: String(process.env.RABBITMQ_VHOST),
|
||||
|
||||
// ESPECIFICO NOS
|
||||
NOS_BASE_URL: String(process.env.NOS_BASE_URL)
|
||||
};
|
||||
|
||||
|
||||
69
packages/sim-consumidor-nos/config/eventBus.config.ts
Normal file
69
packages/sim-consumidor-nos/config/eventBus.config.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import { RabbitMQEventBus, RMQConnectionParams } from "sim-shared/infrastructure/RabbitMQEventBus.js"
|
||||
import { Channel } from "amqp-connection-manager"
|
||||
import { env } from "./env/index.js"
|
||||
|
||||
const rmqUser = env.RABBITMQ_USER
|
||||
const rmqPass = env.RABBITMQ_PASSWORD
|
||||
const rmqHost = env.RABBITMQ_HOST
|
||||
const rmqPort = Number(env.RABBITMQ_PORT)
|
||||
const rmqSecure = false
|
||||
const rmqVhost = env.RABBITMQ_VHOST
|
||||
|
||||
export const rmqConnOptions = <RMQConnectionParams>{
|
||||
username: rmqUser,
|
||||
password: rmqPass,
|
||||
vhost: rmqVhost,
|
||||
hostname: rmqHost,
|
||||
port: rmqPort,
|
||||
secure: rmqSecure,
|
||||
}
|
||||
|
||||
export const rabbitmqEventBus = new RabbitMQEventBus({
|
||||
connectionParams: rmqConnOptions,
|
||||
buildStructure: buildQueues,
|
||||
maxRetry: 5
|
||||
})
|
||||
|
||||
async function buildQueues(channel: Channel) {
|
||||
const QUEUES = {
|
||||
NOS: "sim.nos",
|
||||
NOSDLX: "sim.nos.dlx",
|
||||
NOSDEL: "sim.nos.delayed",
|
||||
}
|
||||
|
||||
const EXCHANGES = {
|
||||
MAIN: "sim.exchange",
|
||||
DLX: "sim.ex.nos.dlx",
|
||||
DEL: "sim.ex.nos.delayed"
|
||||
}
|
||||
|
||||
const DELAY = 10 * 1000
|
||||
const BASE_NOS_KEY = "sim.nos.#"
|
||||
|
||||
await channel.assertExchange(EXCHANGES.DEL, "topic")
|
||||
await channel.assertExchange(EXCHANGES.DLX, "topic")
|
||||
await channel.assertExchange(EXCHANGES.MAIN, "topic")
|
||||
|
||||
await channel.assertQueue(QUEUES.NOS)
|
||||
await channel.assertQueue(QUEUES.NOSDLX)
|
||||
await channel.assertQueue(QUEUES.NOSDEL, {
|
||||
durable: true,
|
||||
arguments: {
|
||||
'x-message-ttl': DELAY,
|
||||
'x-dead-letter-exchange': EXCHANGES.MAIN,
|
||||
}
|
||||
})
|
||||
|
||||
// Cola dead-letter
|
||||
await channel.bindQueue(QUEUES.NOSDLX, EXCHANGES.DLX, "sim.nos.#")
|
||||
// Cola delay
|
||||
await channel.bindQueue(QUEUES.NOSDEL, EXCHANGES.DEL, BASE_NOS_KEY)
|
||||
// Cola nos -> main exchange
|
||||
await channel.bindQueue(QUEUES.NOS, EXCHANGES.MAIN, BASE_NOS_KEY)
|
||||
|
||||
}
|
||||
|
||||
export async function startRMQClient() {
|
||||
await rabbitmqEventBus.connect()
|
||||
return rabbitmqEventBus
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
|
||||
import { startRMQClient } from "#config/eventBusConfig"
|
||||
import { startRMQClient } from "#config/eventBus.config.js"
|
||||
import { SimNosController } from "./aplication/SimNOS.controller.js"
|
||||
|
||||
async function startWorker() {
|
||||
|
||||
@@ -7,7 +7,8 @@
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"build": "yarn tsc --project tsconfig.json && yarn tsc-alias && cp package.json ../../dist/packages/sim-consumidor-nos/",
|
||||
"esbuild": "esbuild index.ts --platform=node",
|
||||
"start": "node ../../dist/packages/sim-consumidor-nos/index.js"
|
||||
"start": "node ../../dist/packages/sim-consumidor-nos/index.js",
|
||||
"dev": "tsx watch index.ts"
|
||||
},
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
|
||||
@@ -5,4 +5,6 @@ OBJ_CLI_ASSERTION=XOc7FtwXD8hUX2SFVX94XSty8wkOmChkwDNF09O_aIxPubMDdFUdCDCB4zpzSI
|
||||
OBJ_CLIENT_ID=savefamily_rest_ws
|
||||
OBJ_KID=xNfbMiyL1ORXGP8lElhcv8nVaG3EJKye4Lc1YoN3I1E
|
||||
OBJ_BASE_URL=https://api-getway.objenious.com/ws
|
||||
|
||||
OBJ_CUSTOMER_CODE=9.49411.10
|
||||
//OBJ_BASE_URL=https://api-getway.objenious.com/ws/test
|
||||
|
||||
@@ -0,0 +1,118 @@
|
||||
import { describe, it, beforeEach, mock, after } from "node:test";
|
||||
import assert from "node:assert";
|
||||
import { SimController } from "./Sim.controller.js";
|
||||
import { EventBus } from "sim-shared/domain/EventBus.port.js";
|
||||
import { SimUseCases } from "./Sim.usecases.js";
|
||||
import { ConsumeMessage } from "amqplib";
|
||||
import { postgrClient, pgPool } from "#config/postgreConfig.js";
|
||||
import { httpInstance } from "#config/httpClient.config.js";
|
||||
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js";
|
||||
import { PauseCancelTaskRepository } from "#adapters/PauseCancelTaskRepository.js";
|
||||
import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js";
|
||||
import { ActionData } from "#domain/DTOs/objeniousapi.js";
|
||||
|
||||
describe("SimController Integration Tests (Real UseCases)", () => {
|
||||
let eventBusMock: any;
|
||||
let controller: SimController;
|
||||
let useCases: SimUseCases;
|
||||
|
||||
beforeEach(() => {
|
||||
// Mock ONLY the event bus as requested
|
||||
eventBusMock = {
|
||||
publish: mock.fn(),
|
||||
addSubscribers: mock.fn(),
|
||||
consume: mock.fn(),
|
||||
ack: mock.fn(async () => { }),
|
||||
nack: mock.fn(async () => { }),
|
||||
};
|
||||
|
||||
const operationRepository = new ObjeniousOperationsRepository(
|
||||
httpInstance,
|
||||
postgrClient,
|
||||
);
|
||||
const orderRepository = new OrderRepository(postgrClient);
|
||||
const pauseRepository = new PauseCancelTaskRepository(postgrClient);
|
||||
useCases = new SimUseCases({
|
||||
httpClient: httpInstance,
|
||||
operationRepository: operationRepository,
|
||||
orderRepository: orderRepository,
|
||||
pauseRepository: pauseRepository
|
||||
});
|
||||
// @ts-expect-error
|
||||
useCases.findActivationDate = async (data: ActionData) => new Date()
|
||||
|
||||
controller = new SimController(eventBusMock as unknown as EventBus, useCases);
|
||||
});
|
||||
|
||||
const createMockMsg = (payload: any): ConsumeMessage => {
|
||||
return {
|
||||
content: Buffer.from(JSON.stringify(payload)),
|
||||
fields: {},
|
||||
properties: {
|
||||
headers: {
|
||||
message_id: "test-correlation-id"
|
||||
}
|
||||
},
|
||||
} as unknown as ConsumeMessage;
|
||||
};
|
||||
|
||||
after(async () => {
|
||||
await pgPool.end();
|
||||
});
|
||||
|
||||
describe("suspend", () => {
|
||||
it("should call stage_suspend and interact with DB and EventBus", async () => {
|
||||
const iccid = "test-iccid-suspend-" + Date.now();
|
||||
const msg = createMockMsg({
|
||||
key: "sim.test.pause",
|
||||
payload: {
|
||||
iccid: iccid
|
||||
},
|
||||
headers: {
|
||||
message_id: "correlation-suspend-" + iccid
|
||||
}
|
||||
});
|
||||
|
||||
const handler = controller.suspend();
|
||||
await handler(msg);
|
||||
|
||||
// Verify that it reached the stage_suspend logic (which adds to pauseRepository)
|
||||
// We can query the DB or check if ACK was called
|
||||
assert.strictEqual(eventBusMock.ack.mock.callCount(), 1, "Message should be ACKed on success");
|
||||
assert.strictEqual(eventBusMock.nack.mock.callCount(), 0, "Message should not be NACKed");
|
||||
});
|
||||
});
|
||||
|
||||
describe("terminate", () => {
|
||||
it("should call stage_terminate and interact with DB and EventBus", async () => {
|
||||
const iccid = "test-iccid-terminate-" + Date.now();
|
||||
const msg = createMockMsg({
|
||||
key: "sim.test.pause",
|
||||
payload: {
|
||||
iccid: iccid
|
||||
},
|
||||
headers: {
|
||||
message_id: "correlation-terminate-" + iccid
|
||||
}
|
||||
});
|
||||
|
||||
const handler = controller.terminate();
|
||||
await handler(msg);
|
||||
|
||||
assert.strictEqual(eventBusMock.ack.mock.callCount(), 1, "Message should be ACKed on success");
|
||||
assert.strictEqual(eventBusMock.nack.mock.callCount(), 0, "Message should not be NACKed");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Error Handling", () => {
|
||||
it("should nack if message is invalid", async () => {
|
||||
const msg = {
|
||||
content: Buffer.from("invalid json"),
|
||||
fields: {},
|
||||
properties: {},
|
||||
} as unknown as ConsumeMessage;
|
||||
const handler = controller.suspend();
|
||||
await assert.rejects(handler(msg), "Error de suspension consumiendo el mensaje no es valido");
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -3,6 +3,7 @@ import { ConsumeMessage } from "amqplib";
|
||||
import { SimUseCases } from "./Sim.usecases.js";
|
||||
import { SimEvents } from "sim-shared/domain/SimEvents.js";
|
||||
import { Result } from "sim-shared/domain/Result.js";
|
||||
import { ActionData } from "#domain/DTOs/objeniousapi.js";
|
||||
|
||||
/**
|
||||
* La clase usa generadores de funciones para mantener el contexto
|
||||
@@ -36,6 +37,7 @@ export class SimController {
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error al decodificar JSON:', error);
|
||||
console.error(Buffer.from(msg.content).toString(("utf8")))
|
||||
// Aquí podrías decidir devolver el string crudo o null
|
||||
return undefined;
|
||||
}
|
||||
@@ -64,6 +66,8 @@ export class SimController {
|
||||
}
|
||||
|
||||
public activate() {
|
||||
const DUE_DATE_SECONDS = 2 * 60
|
||||
|
||||
return async (msg: ConsumeMessage) => {
|
||||
let msgData;
|
||||
try {
|
||||
@@ -80,9 +84,10 @@ export class SimController {
|
||||
throw new Error("Error activando la sim, no se ha especificado la oferta")
|
||||
}
|
||||
|
||||
this.tryUseCase(msg, this.useCases.activate({
|
||||
dueDate: this.genDueDate(2 * 60).toISOString(),
|
||||
customerAccountCode: "9.49411.10", // TODO: Al .env
|
||||
const resp = await this.tryUseCase(msg, this.useCases.activate({
|
||||
correlation_id: msgData.headers?.message_id,
|
||||
dueDate: this.genDueDate(DUE_DATE_SECONDS).toISOString(),
|
||||
customerAccountCode: "9.49411.10",
|
||||
identifier: {
|
||||
identifierType: "ICCID",
|
||||
identifiers: [iccid]
|
||||
@@ -92,6 +97,11 @@ export class SimController {
|
||||
services: []
|
||||
}
|
||||
}))
|
||||
|
||||
// TODO:
|
||||
// - Crear un registro de operación
|
||||
// - Si ha salido bien id de operación -> webhook?
|
||||
// - Si ha salido mal notificar solo cuando se manda a dlx ??
|
||||
}
|
||||
}
|
||||
|
||||
@@ -109,7 +119,8 @@ export class SimController {
|
||||
}
|
||||
|
||||
const iccid = msgData.payload.iccid
|
||||
this.tryUseCase(msg, this.useCases.preActivate({
|
||||
const res = await this.tryUseCase(msg, this.useCases.preActivate({
|
||||
correlation_id: msgData.headers?.message_id,
|
||||
dueDate: this.genDueDate(2 * 60).toISOString(),
|
||||
identifier: {
|
||||
identifierType: "ICCID",
|
||||
@@ -135,7 +146,8 @@ export class SimController {
|
||||
}
|
||||
|
||||
const iccid = msgData.payload.iccid
|
||||
this.tryUseCase(msg, this.useCases.suspend({
|
||||
const res = await this.tryUseCase(msg, this.useCases.reActivate({
|
||||
correlation_id: msgData.headers?.message_id,
|
||||
dueDate: this.genDueDate(2 * 60).toISOString(),
|
||||
identifier: {
|
||||
identifierType: "ICCID",
|
||||
@@ -146,6 +158,9 @@ export class SimController {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Lo mismo que pause
|
||||
*/
|
||||
public suspend() {
|
||||
return async (msg: ConsumeMessage) => {
|
||||
let msgData;
|
||||
@@ -160,13 +175,18 @@ export class SimController {
|
||||
}
|
||||
|
||||
const iccid = msgData.payload.iccid
|
||||
this.tryUseCase(msg, this.useCases.suspend({
|
||||
const suspendData: ActionData = {
|
||||
correlation_id: msgData.headers?.message_id,
|
||||
dueDate: this.genDueDate(2 * 60).toISOString(),
|
||||
identifier: {
|
||||
identifierType: "ICCID",
|
||||
identifiers: [iccid]
|
||||
identifiers: [iccid] // Por algún motivo solo he puesto un iccd por identifier
|
||||
}
|
||||
}))
|
||||
}
|
||||
const useCaseRes = await this.tryUseCase(msg, this.useCases.stage_suspend(suspendData))
|
||||
/*
|
||||
const res = await this.tryUseCase(msg, this.useCases.suspend(actionData))
|
||||
*/
|
||||
|
||||
}
|
||||
}
|
||||
@@ -183,15 +203,20 @@ export class SimController {
|
||||
if (msgData == undefined) {
|
||||
return Promise.reject("Mensaje invalido")
|
||||
}
|
||||
|
||||
const iccid = msgData.payload.iccid
|
||||
console.log("Mensaje procesado", String(msgData))
|
||||
this.tryUseCase(msg, this.useCases.terminate({
|
||||
const terminateActionData: ActionData = {
|
||||
correlation_id: msgData.headers?.message_id,
|
||||
dueDate: this.genDueDate(2 * 60).toISOString(),
|
||||
identifier: {
|
||||
identifierType: "ICCID",
|
||||
identifiers: [iccid]
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
//const res = await this.tryUseCase(msg, this.useCases.terminate(terminateActionData))
|
||||
const res = await this.tryUseCase(msg, this.useCases.stage_terminate(terminateActionData))
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -213,3 +238,4 @@ export class SimController {
|
||||
return dueDate
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -18,8 +18,8 @@ export class SimRouter {
|
||||
this.routes = new Map([
|
||||
["activate", this.simController.activate()],
|
||||
["pause", this.simController.suspend()],
|
||||
["cancel", this.simController.terminate()], // terminate
|
||||
["reActivate", this.simController.reActivate()],
|
||||
["cancel", this.simController.terminate()],
|
||||
["reactivate", this.simController.reActivate()],
|
||||
["preActivate", this.simController.preActivate()]
|
||||
]);
|
||||
}
|
||||
@@ -27,6 +27,8 @@ export class SimRouter {
|
||||
/**
|
||||
* Enruta el mensaje a la acción correspondiente basándose en la routing key
|
||||
* TODO: No estoy seguro que deba meter el nack aqui
|
||||
* - De moemento el ack-nack se gestiona en los controller, por si acaso hay casos
|
||||
* limite en
|
||||
*/
|
||||
public route = async (msg: ConsumeMessage | null): Promise<void> => {
|
||||
if (!msg) {
|
||||
|
||||
@@ -3,6 +3,10 @@ import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js"
|
||||
import { AxiosError } from "axios"
|
||||
import { Result } from "sim-shared/domain/Result.js"
|
||||
import { ObjeniousOperation, IOperationsRepository as OperationsRepositoryPort } from "sim-shared/domain/operationsRepository.port.js"
|
||||
import assert from "node:assert"
|
||||
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"
|
||||
import { CreatePauseCancelTaskDTO, PauseCancelTaskRepository } from "#adapters/PauseCancelTaskRepository.js"
|
||||
import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js"
|
||||
|
||||
// TODO:
|
||||
// - Pasar a un archivo de DTOs
|
||||
@@ -10,26 +14,122 @@ import { ObjeniousOperation, IOperationsRepository as OperationsRepositoryPort }
|
||||
|
||||
export class SimUseCases {
|
||||
private readonly httpClient: HttpClient
|
||||
private readonly operationRepository: OperationsRepositoryPort
|
||||
private readonly objeniousRepository: ObjeniousOperationsRepository
|
||||
private readonly orderRepository: OrderRepository
|
||||
private readonly pauseRepository: PauseCancelTaskRepository
|
||||
|
||||
constructor(args: {
|
||||
httpClient: HttpClient,
|
||||
operationRepository: OperationsRepositoryPort
|
||||
operationRepository: ObjeniousOperationsRepository,
|
||||
orderRepository: OrderRepository,
|
||||
pauseRepository: PauseCancelTaskRepository
|
||||
}) {
|
||||
this.httpClient = args.httpClient
|
||||
this.operationRepository = args.operationRepository
|
||||
this.objeniousRepository = args.operationRepository
|
||||
this.orderRepository = args.orderRepository
|
||||
this.pauseRepository = args.pauseRepository
|
||||
}
|
||||
|
||||
private async logOperation(data: ObjeniousOperation) {
|
||||
await this.operationRepository.createOperation({
|
||||
await this.objeniousRepository.createOperation({
|
||||
...data
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Garantiza el flujo de todos los casos de uso de:
|
||||
* - Petición según la acción
|
||||
* - Control de errores
|
||||
* - Siempre devuelve un Result
|
||||
* - Almacena la operacion en la base de datos
|
||||
* - Actualiza el estado del order
|
||||
*
|
||||
* Necesita:
|
||||
* - Mas control según el codigo de error
|
||||
*/
|
||||
private generateUseCase<
|
||||
PAYLOAD,
|
||||
RESPONSETYPE extends { requestId: string }
|
||||
>(args: {
|
||||
correlation_id?: string,
|
||||
url: string,
|
||||
operation: string,
|
||||
operationPayload: PAYLOAD,
|
||||
iccid: string
|
||||
onError?: (_: any) => void
|
||||
// on code response??
|
||||
}): () => Promise<Result<string, boolean>> {
|
||||
return async () => {
|
||||
const req = this.httpClient.client.post<RESPONSETYPE>(args.url, {
|
||||
...args.operationPayload
|
||||
})
|
||||
|
||||
try {
|
||||
const response = await req;
|
||||
|
||||
if (response.status == 200) {
|
||||
assert(response.data.requestId != undefined)
|
||||
|
||||
// Creacion de la operacion inicial, antes de tener los datos
|
||||
const operation: ObjeniousOperation = {
|
||||
operation: args.operation,
|
||||
iccids: String(args.iccid),
|
||||
status: "noMassID",
|
||||
request_id: response.data.requestId,
|
||||
correlation_id: args.correlation_id
|
||||
}
|
||||
|
||||
// TODO: Esto tiene poco sentido si la operacion ya se
|
||||
// tenia que haber creado en el generador
|
||||
this.logOperation(operation)
|
||||
.then().catch(e => console.error("Error login operation", e))
|
||||
|
||||
if (args.correlation_id != undefined) {
|
||||
this.orderRepository.updateOrder({
|
||||
correlation_id: args.correlation_id!,
|
||||
new_status: "running", // Siempre es runing la primera vez que se consume
|
||||
})
|
||||
.then(e => console.log("Order actualizado: ", e))
|
||||
.catch(e => console.error("Error actualizando order", args.correlation_id))
|
||||
}
|
||||
|
||||
return <Result<string, boolean>>{
|
||||
error: undefined,
|
||||
data: true
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
error: String(response.status),
|
||||
data: undefined
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`[Sim.usecase] Error ${args.operation}`, (error as AxiosError).response?.status)
|
||||
return {
|
||||
error: "Error general de la peticion",
|
||||
data: undefined
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public activate(activationData: ActivationData): () => Promise<Result<string, boolean>> {
|
||||
const OPERATION_URL = "/actions/activateLine"
|
||||
return async () => {
|
||||
const iccid = activationData.identifier.identifiers
|
||||
// Comporbación excepcional para saber si la linea está suspendida
|
||||
const statusLinea = await this.objeniousRepository.getLinesAPI("ICCID", [String(iccid)])
|
||||
console.log("statusLinea, ", iccid, statusLinea)
|
||||
if (statusLinea.data != undefined && statusLinea.data[0].status.networkStatus == "SUSPENDED") {
|
||||
const res = await this.reActivate(activationData)()
|
||||
return res;
|
||||
}
|
||||
|
||||
const req = this.httpClient.client.post(OPERATION_URL, {
|
||||
...activationData
|
||||
dueDate: activationData.dueDate,
|
||||
identifier: activationData.identifier,
|
||||
customerAccountCode: activationData.customerAccountCode,
|
||||
offer: activationData.offer
|
||||
})
|
||||
|
||||
try {
|
||||
@@ -51,8 +151,6 @@ export class SimUseCases {
|
||||
error: undefined,
|
||||
data: true
|
||||
}
|
||||
|
||||
|
||||
} else {
|
||||
// muy mejorable el control de errores
|
||||
return {
|
||||
@@ -82,6 +180,7 @@ export class SimUseCases {
|
||||
if (resp.status == 200) {
|
||||
console.log("Sim preactivada con exito", resp.data)
|
||||
const operation: ObjeniousOperation = {
|
||||
correlation_id: preActivateData.correlation_id,
|
||||
operation: "preActivate",
|
||||
iccids: String(preActivateData.identifier.identifiers),
|
||||
status: "noMassID",
|
||||
@@ -96,38 +195,59 @@ export class SimUseCases {
|
||||
} else {
|
||||
return <Result<string, boolean>>{
|
||||
error: String(resp.status),
|
||||
data: true
|
||||
data: undefined
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Error preactivacion", preActivateData)
|
||||
return <Result<string, boolean>>{
|
||||
error: "Error preactivando la sim" + preActivateData.identifier,
|
||||
data: true
|
||||
data: undefined
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public reActivate(pauseData: ActionData): () => Promise<Result<string, boolean>> {
|
||||
public reActivate(reactivateData: ActionData): () => Promise<Result<string, boolean>> {
|
||||
const OPERATION_URL = "/actions/reactivateLine"
|
||||
return async () => {
|
||||
const req = this.httpClient.client.post(OPERATION_URL, {
|
||||
...pauseData
|
||||
...reactivateData
|
||||
})
|
||||
|
||||
try {
|
||||
const e = await req
|
||||
console.log("Sim reactivada con exito", e.data)
|
||||
return <Result<string, boolean>>{
|
||||
error: undefined,
|
||||
data: true
|
||||
const response = await req
|
||||
|
||||
// Creacion de la operacion inicial, antes de tener los datos
|
||||
const operation: ObjeniousOperation = {
|
||||
operation: "reactivate",
|
||||
iccids: reactivateData.identifier.identifiers[0],
|
||||
status: "noMassID",
|
||||
request_id: response.data.requestId,
|
||||
correlation_id: reactivateData.correlation_id
|
||||
}
|
||||
|
||||
// TODO: Esto tiene poco sentido si la operacion ya se
|
||||
// tenia que haber creado en el generador
|
||||
this.logOperation(operation)
|
||||
.then().catch(e => console.error("Error login operation", e))
|
||||
if (response.status == 200) {
|
||||
console.log("[o] Sim solicitud de reactivacion ", response.data)
|
||||
return <Result<string, boolean>>{
|
||||
error: undefined,
|
||||
data: true
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
error: String(response.status),
|
||||
data: undefined
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Error reactivacion", error)
|
||||
console.error("[x] Error reactivacion", (error as AxiosError).response?.status)
|
||||
return <Result<string, boolean>>{
|
||||
error: "Error reactivando la sim" + pauseData.identifier,
|
||||
data: true
|
||||
error: "Error reactivando la sim" + reactivateData.identifier,
|
||||
data: undefined
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -135,54 +255,189 @@ export class SimUseCases {
|
||||
|
||||
public suspend(suspendData: ActionData): () => Promise<Result<string, boolean>> {
|
||||
const OPERATION_URL = "/actions/suspendLine"
|
||||
return async () => {
|
||||
const req = this.httpClient.client.post(OPERATION_URL, {
|
||||
...suspendData
|
||||
})
|
||||
return this.generateUseCase({
|
||||
correlation_id: suspendData.correlation_id,
|
||||
operationPayload: {
|
||||
dueDate: suspendData.dueDate,
|
||||
identifier: suspendData.identifier
|
||||
},
|
||||
url: OPERATION_URL,
|
||||
iccid: suspendData.identifier.identifiers[0], //
|
||||
operation: "suspend"
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Metodo muy especifico para obtener la fecha e activacion o en su defecto
|
||||
* la actual para aber cuando se va a completar el periodo de test de una linea
|
||||
*/
|
||||
private async findActivationDate(actionData: ActionData) {
|
||||
const iccid = actionData.identifier.identifiers
|
||||
const lineData = await this.objeniousRepository.getLinesAPI("ICCID", iccid)
|
||||
let activationDate = new Date()
|
||||
// Si no se pueden sacar datos de la linea guardo momentaneamente el error
|
||||
// pero no se cancela la operacion, el error puede ser de objenious y no nos
|
||||
// puede afectar
|
||||
console.log("LineData", lineData.data)
|
||||
if (lineData.error != undefined) {
|
||||
console.error(lineData.error)
|
||||
} else {
|
||||
const activationDateStr = lineData.data[0].status.activationDate
|
||||
if (activationDateStr != undefined && activationDateStr != "") {
|
||||
activationDate = new Date(activationDateStr)
|
||||
}
|
||||
}
|
||||
return activationDate
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Paso previo a la suspension para evitar errores cuando el billing es test
|
||||
*/
|
||||
public stage_suspend(suspendData: ActionData): () => Promise<Result<string, boolean>> {
|
||||
return async (): Promise<Result<string, boolean>> => {
|
||||
const correlation_id = suspendData.correlation_id
|
||||
const iccid = suspendData.identifier.identifiers
|
||||
|
||||
|
||||
const operation: ObjeniousOperation = {
|
||||
operation: "suspend",
|
||||
iccids: iccid[0],
|
||||
status: "running",
|
||||
correlation_id: correlation_id
|
||||
}
|
||||
// No se registra hasta que no pase por la tabla de pausas
|
||||
// this.logOperation(operation)
|
||||
// .then().catch(e => console.error("Error login operation", e))
|
||||
|
||||
const fail = (error: string) => {
|
||||
console.error("[Sim.usecases]", error)
|
||||
if (correlation_id != undefined) {
|
||||
this.orderRepository.updateOrder({
|
||||
correlation_id: correlation_id,
|
||||
new_status: "failed"
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TODO REGISTRAR EL ORDER
|
||||
if (correlation_id != undefined) {
|
||||
await this.orderRepository.createOrder({
|
||||
correlation_id: correlation_id,
|
||||
order_type: "pause"
|
||||
})
|
||||
}
|
||||
|
||||
let activationDate;
|
||||
try {
|
||||
const e = await req
|
||||
console.log("Sim pausada/suspendida con exito", e.data)
|
||||
return <Result<string, boolean>>{
|
||||
error: undefined,
|
||||
data: true
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("[Pausa Use case] Error pausa")
|
||||
activationDate = await this.findActivationDate(suspendData)
|
||||
} catch (e) {
|
||||
return {
|
||||
error: "Error general pausando/suspendiendo la sim" + suspendData.identifier,
|
||||
data: undefined
|
||||
error: String(e)
|
||||
}
|
||||
}
|
||||
const newTask: CreatePauseCancelTaskDTO = {
|
||||
iccid: iccid[0],
|
||||
activation_date: activationDate,
|
||||
next_check: undefined, // Que se haga instantaneamente al ser la primera
|
||||
operation_type: "suspend",
|
||||
action_data: suspendData
|
||||
}
|
||||
|
||||
const taskCreated = await this.pauseRepository.addTask(newTask)
|
||||
|
||||
// Caso que la task no se pueda crear en la BDD
|
||||
if (taskCreated.error != undefined) {
|
||||
fail(taskCreated.error)
|
||||
return {
|
||||
error: taskCreated.error
|
||||
}
|
||||
}
|
||||
|
||||
// Caso que se haya creado en la BDD
|
||||
if (correlation_id != undefined) {
|
||||
this.orderRepository.updateOrder({
|
||||
correlation_id: correlation_id,
|
||||
new_status: "running"
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
data: true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public terminate(terminationData: ActionData): () => Promise<Result<string, boolean>> {
|
||||
const OPERATION_URL = "/actions/terminateLine"
|
||||
return async () => {
|
||||
const req = this.httpClient.client.post(OPERATION_URL, {
|
||||
...terminationData
|
||||
})
|
||||
/**
|
||||
* Paso previo a la suspension para evitar errores cuando el billing es test
|
||||
*/
|
||||
public stage_terminate(terminateData: ActionData): () => Promise<Result<string, boolean>> {
|
||||
return async (): Promise<Result<string, boolean>> => {
|
||||
const correlation_id = terminateData.correlation_id
|
||||
const iccid = terminateData.identifier.identifiers[0]
|
||||
|
||||
// TODO: para cuando estemos listos.
|
||||
throw new Error("Peticion no reversible desactivada de momento")
|
||||
const activationDate = await this.findActivationDate(terminateData)
|
||||
const newTask: CreatePauseCancelTaskDTO = {
|
||||
iccid: iccid,
|
||||
activation_date: activationDate,
|
||||
next_check: undefined, // Que se haga instantaneamente al ser la primera
|
||||
operation_type: "terminate",
|
||||
action_data: terminateData
|
||||
}
|
||||
|
||||
try {
|
||||
const e = await req
|
||||
console.log("Sim cancelada con exito", e.data)
|
||||
return <Result<string, boolean>>{
|
||||
error: undefined,
|
||||
data: true
|
||||
const taskCreated = await this.pauseRepository.addTask(newTask)
|
||||
|
||||
const operation: ObjeniousOperation = {
|
||||
operation: "terminate",
|
||||
iccids: iccid,
|
||||
status: "running",
|
||||
correlation_id: correlation_id
|
||||
}
|
||||
|
||||
/**
|
||||
this.logOperation(operation)
|
||||
.then().catch(e => console.error("Error login operation", e))
|
||||
*/
|
||||
// Caso que la task no se pueda crear en la BDD
|
||||
if (taskCreated.error != undefined) {
|
||||
console.error("[Sim.usecases]", taskCreated.error)
|
||||
if (correlation_id != undefined) {
|
||||
this.orderRepository.updateOrder({
|
||||
correlation_id: correlation_id,
|
||||
new_status: "failed"
|
||||
})
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Error pausa", error)
|
||||
return <Result<string, boolean>>{
|
||||
error: "Error cancelando/terminate la sim" + terminationData.identifier,
|
||||
data: undefined
|
||||
return {
|
||||
error: taskCreated.error
|
||||
}
|
||||
}
|
||||
|
||||
// Caso que se haya creado en la BDD
|
||||
if (correlation_id != undefined) {
|
||||
this.orderRepository.updateOrder({
|
||||
correlation_id: correlation_id,
|
||||
new_status: "running"
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
data: true
|
||||
}
|
||||
}
|
||||
}
|
||||
public terminate(terminationData: ActionData): () => Promise<Result<string, boolean>> {
|
||||
const OPERATION_URL = "/actions/terminateLine"
|
||||
return this.generateUseCase({
|
||||
correlation_id: terminationData.correlation_id,
|
||||
operationPayload: {
|
||||
dueDate: terminationData.dueDate,
|
||||
identifier: terminationData.identifier
|
||||
},
|
||||
url: OPERATION_URL,
|
||||
iccid: terminationData.identifier.identifiers[0], //
|
||||
operation: "terminate"
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
@@ -12,8 +12,8 @@ export const env = {
|
||||
POSTGRES_HOST: process.env.POSTGRES_HOST,
|
||||
POSTGRES_DATABASE: process.env.POSTGRES_DATABASE,
|
||||
RABBITMQ_HOST: String(process.env.RABBITMQ_HOST ?? "localhost"),
|
||||
RABBITMQ_USER: String(process.env.RABBITMQ_USER ?? "guest"),
|
||||
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD ?? "guest"),
|
||||
RABBITMQ_USER: String(process.env.RABBITMQ_USER ?? "test"),
|
||||
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD ?? "test"),
|
||||
RABBITMQ_EXCHANGE: String(process.env.RABBITMQ_EXCHANGE ?? "/"),
|
||||
RABBITMQ_PORT: parseInt(process.env.RABBITMQ_PORT ?? "5672"),
|
||||
RABBITMQ_MODULENAME: process.env.MODULENAME,
|
||||
@@ -28,7 +28,7 @@ export const env = {
|
||||
OBJ_CLI_ASSERTION: String(process.env.OBJ_CLI_ASSERTION),
|
||||
OBJ_CLIENT_ID: String(process.env.OBJ_CLIENT_ID),
|
||||
OBJ_KID: String(process.env.OBJ_KID),
|
||||
OBJ_BASE_URL: String(process.env.OBJ_BASE_URL)
|
||||
|
||||
OBJ_BASE_URL: String(process.env.OBJ_BASE_URL),
|
||||
OBJ_CUSTOMER_CODE: String(process.env.OBJ_CUSTOMER_CODE)
|
||||
};
|
||||
|
||||
|
||||
@@ -27,8 +27,8 @@ export const rabbitmqEventBus = new RabbitMQEventBus({
|
||||
async function buildQueues(channel: Channel) {
|
||||
const QUEUES = {
|
||||
OBJ: "sim.objenious",
|
||||
DLX: "sim.objenious.dlx",
|
||||
DEL: "sim.objenious.delayed"
|
||||
OBJDLX: "sim.objenious.dlx",
|
||||
OBJDEL: "sim.objenious.delayed",
|
||||
}
|
||||
|
||||
const EXCHANGES = {
|
||||
@@ -45,8 +45,8 @@ async function buildQueues(channel: Channel) {
|
||||
await channel.assertExchange(EXCHANGES.MAIN, "topic")
|
||||
|
||||
await channel.assertQueue(QUEUES.OBJ)
|
||||
await channel.assertQueue(QUEUES.DLX)
|
||||
await channel.assertQueue(QUEUES.DEL, {
|
||||
await channel.assertQueue(QUEUES.OBJDLX)
|
||||
await channel.assertQueue(QUEUES.OBJDEL, {
|
||||
durable: true,
|
||||
arguments: {
|
||||
'x-message-ttl': DELAY,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js"
|
||||
import { JWTService } from "../aplication/JWT.service.js"
|
||||
import { env } from "./env/index.js"
|
||||
import { jwtService } from "./jwtService.config.js"
|
||||
|
||||
const OBJ_BASE_URL = env.OBJ_BASE_URL
|
||||
|
||||
@@ -9,5 +9,5 @@ export const httpInstance = new HttpClient({
|
||||
headers: {
|
||||
"content-type": " application/json; charset=utf-8"
|
||||
},
|
||||
jwtManager: new JWTService()
|
||||
jwtManager: jwtService
|
||||
})
|
||||
|
||||
@@ -0,0 +1,59 @@
|
||||
import { GrantAccessRequestBody, JWTService } from "sim-shared/aplication/JWT.service.js"
|
||||
import { env } from "./env/index.js"
|
||||
import { JWTHeader } from "sim-shared/domain/JWT.js"
|
||||
|
||||
|
||||
const PRIVATE_KEY_PATH = env.OBJ_PEM_PATH
|
||||
|
||||
const GET_TOKEN_URL = "https://idp.docapost.io/auth/realms/GETWAY/protocol/openid-connect/token"
|
||||
const REFRESH_TOKEN_URL = GET_TOKEN_URL
|
||||
|
||||
const DEFAULT_BODY: GrantAccessRequestBody = {
|
||||
grant_type: "client_credentials",
|
||||
client_id: env.OBJ_CLIENT_ID,
|
||||
client_assertion_type: "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
|
||||
client_assertion: env.OBJ_CLI_ASSERTION
|
||||
}
|
||||
|
||||
|
||||
const DEFAULT_HEADERS = {
|
||||
"content-type": "application/x-www-form-urlencoded"
|
||||
}
|
||||
|
||||
const DEFAULT_HEADERS_JWT = {
|
||||
alg: "RS256",
|
||||
typ: "JWT",
|
||||
kid: env.OBJ_KID,
|
||||
}
|
||||
|
||||
const DEFAULT_DATA_JWT = {
|
||||
sub: env.OBJ_CLIENT_ID,
|
||||
iss: env.OBJ_CLIENT_ID,
|
||||
aud: "https://idp.docapost.io/auth/realms/GETWAY",
|
||||
jti: Date.now().toString(),
|
||||
|
||||
}
|
||||
|
||||
function addIATHeaders(authHeaders: Object) {
|
||||
const headers = <JWTHeader>{
|
||||
...authHeaders,
|
||||
sub: env.OBJ_CLIENT_ID,
|
||||
iss: env.OBJ_CLIENT_ID,
|
||||
aud: GET_TOKEN_URL,
|
||||
jti: Date.now().toString(),
|
||||
iat: Math.floor(Date.now() / 1000),
|
||||
exp: Math.floor(Date.now() / 1000) + 5 * 60,
|
||||
}
|
||||
return headers
|
||||
}
|
||||
|
||||
export const jwtService = new JWTService({
|
||||
transformJWTHeaders: addIATHeaders,
|
||||
defaultHeaders: DEFAULT_HEADERS,
|
||||
defaultBody: DEFAULT_BODY,
|
||||
defaultJWTHeaders: DEFAULT_HEADERS_JWT,
|
||||
defaultJWTPayload: DEFAULT_DATA_JWT,
|
||||
privateKeyPath: PRIVATE_KEY_PATH,
|
||||
tokenUrl: GET_TOKEN_URL,
|
||||
refreshTokenUrl: REFRESH_TOKEN_URL
|
||||
})
|
||||
@@ -1,5 +1,6 @@
|
||||
|
||||
export type ActionData = {
|
||||
correlation_id?: string;
|
||||
dueDate: string, // isodate
|
||||
filter?: {} // no se si hace falta
|
||||
identifier: {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
|
||||
import { OperationsRepository } from "sim-shared/infrastructure/OperationRepository.js"
|
||||
import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js"
|
||||
import { startRMQClient } from "#config/eventBus.config.js"
|
||||
import { httpInstance } from "#config/httpClient.config.js"
|
||||
import { pgPool } from "#config/postgreConfig.js"
|
||||
@@ -7,6 +7,8 @@ import { PgClient } from "sim-shared/infrastructure/PgClient.js"
|
||||
import { SimUseCases } from "./aplication/Sim.usecases.js"
|
||||
import { SimController } from "./aplication/Sim.controller.js"
|
||||
import { SimRouter } from "./aplication/Sim.router.js"
|
||||
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"
|
||||
import { PauseCancelTaskRepository } from "#adapters/PauseCancelTaskRepository.js"
|
||||
|
||||
async function startWorker() {
|
||||
const rmqClient = await startRMQClient()
|
||||
@@ -17,13 +19,21 @@ async function startWorker() {
|
||||
|
||||
await pgClient.checkDatabaseConnection()
|
||||
|
||||
const operationRepository = new OperationsRepository(pgClient)
|
||||
const operationRepository = new ObjeniousOperationsRepository(
|
||||
httpClient,
|
||||
pgClient,
|
||||
)
|
||||
const orderRepository = new OrderRepository(pgClient)
|
||||
|
||||
const pauseRepository = new PauseCancelTaskRepository(pgClient)
|
||||
|
||||
const simActivationController = new SimController(
|
||||
rmqClient,
|
||||
new SimUseCases({
|
||||
httpClient: httpClient,
|
||||
operationRepository: operationRepository
|
||||
operationRepository: operationRepository,
|
||||
orderRepository: orderRepository,
|
||||
pauseRepository: pauseRepository
|
||||
})
|
||||
)
|
||||
const simRouter = new SimRouter(simActivationController, rmqClient)
|
||||
|
||||
@@ -0,0 +1,72 @@
|
||||
import { after, before, describe, it } from "node:test";
|
||||
import { CreatePauseCancelTaskDTO, PauseCancelTaskRepository } from "./PauseCancelTaskRepository.js";
|
||||
import { postgrClient } from "#config/postgreConfig.js";
|
||||
import assert from "node:assert";
|
||||
|
||||
const testTask: CreatePauseCancelTaskDTO = {
|
||||
iccid: "1234",
|
||||
operation_type: "suspend",
|
||||
activation_date: new Date(),
|
||||
next_check: new Date(),
|
||||
action_data: {
|
||||
dueDate: new Date().toString(),
|
||||
correlation_id: "12223",
|
||||
identifier: {
|
||||
identifiers: ["1234"],
|
||||
identifierType: "ICCID"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
describe("Test PauseCancelTaskRepository - DB", () => {
|
||||
|
||||
const createdIds: number[] = [];
|
||||
const pauseRepo = new PauseCancelTaskRepository(postgrClient)
|
||||
|
||||
before(() => {
|
||||
})
|
||||
|
||||
after(() => {
|
||||
})
|
||||
|
||||
it("Should create a task", async () => {
|
||||
const created = await pauseRepo.addTask(testTask)
|
||||
assert.ok(created != undefined, "A value must be returned always")
|
||||
assert.ok(created.error == undefined, "Should not return a error")
|
||||
assert.ok(created.data != undefined, "Data must be returned")
|
||||
createdIds.push(created.data.id)
|
||||
})
|
||||
|
||||
it("Should update a existing task", async () => {
|
||||
const updated = await pauseRepo.updateTask({
|
||||
id: createdIds[0],
|
||||
next_check: new Date()
|
||||
})
|
||||
|
||||
assert.ok(updated != undefined, "A value must be returned always")
|
||||
assert.ok(updated.error == undefined, "Should not return a error")
|
||||
assert.ok(updated.data != undefined, "Data must be returned")
|
||||
})
|
||||
|
||||
it("Should finish a existing task", async () => {
|
||||
const finish = await pauseRepo.finishTask({
|
||||
id: createdIds[0],
|
||||
error: "ok"
|
||||
})
|
||||
|
||||
assert.ok(finish != undefined, "A value must be returned always")
|
||||
assert.ok(finish.error == undefined, "Should not return a error")
|
||||
assert.ok(finish.data != undefined, "Data must be returned")
|
||||
})
|
||||
|
||||
it("Should get at least 1 pending task", async () => {
|
||||
const created = await pauseRepo.addTask(testTask)
|
||||
const pending = await pauseRepo.getPending()
|
||||
|
||||
assert.ok(pending != undefined, "A value must be returned always")
|
||||
assert.ok(pending.error == undefined, "Should not return a error")
|
||||
assert.ok(pending.data != undefined, "Data must be returned")
|
||||
|
||||
console.log("--> ", pending.data[0])
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,126 @@
|
||||
import { Result } from "sim-shared/domain/Result.js";
|
||||
import { QueryResult } from "pg";
|
||||
import { PgClient } from "sim-shared/infrastructure/PgClient.js";
|
||||
import { AxiosError } from "axios";
|
||||
import { ActionData } from "#domain/DTOs/objeniousapi.js";
|
||||
|
||||
export type PauseCancelTask = {
|
||||
id: number;
|
||||
iccid: string;
|
||||
operation_type: "suspend" | "terminate",
|
||||
last_checked?: Date | null;
|
||||
activation_date?: Date | null;
|
||||
next_check?: Date | null;
|
||||
completed_date?: Date | null;
|
||||
error?: string | null;
|
||||
action_data: ActionData
|
||||
}
|
||||
|
||||
export type CreatePauseCancelTaskDTO = Pick<PauseCancelTask, "iccid" | "activation_date" | "next_check" | "operation_type" | "action_data">
|
||||
export type UpdatePauseCancelTaskDTO = Pick<PauseCancelTask, "id" | "next_check">
|
||||
export type FinishPauseCancelTaskDTO = Pick<PauseCancelTask, "id" | "error">
|
||||
|
||||
/**
|
||||
* Repositorio para compensar los problemas de cacelcaiones/pausas de objenious a
|
||||
* la hora aplicarlo sobre una linea con el billing a test.
|
||||
*/
|
||||
export class PauseCancelTaskRepository {
|
||||
constructor(
|
||||
private readonly pgClient: PgClient
|
||||
) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtiene las siguientes que se pueden lanzar, puede haber más pero
|
||||
* estan pendientes
|
||||
*/
|
||||
public async getPending(): Promise<Result<string, PauseCancelTask[]>> {
|
||||
const sql = `
|
||||
SELECT * FROM pause_cancel_tasks
|
||||
WHERE completed_date IS NULL
|
||||
AND (next_check <= NOW() OR next_check IS NULL)
|
||||
ORDER BY id ASC;
|
||||
`;
|
||||
|
||||
try {
|
||||
const res: QueryResult<PauseCancelTask> = await this.pgClient.query(sql);
|
||||
return {
|
||||
data: res.rows
|
||||
}
|
||||
} catch (e) {
|
||||
return {
|
||||
error: (e as AxiosError).message
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async addTask(task: CreatePauseCancelTaskDTO): Promise<Result<string, PauseCancelTask>> {
|
||||
|
||||
const sql = `
|
||||
INSERT INTO pause_cancel_tasks (iccid, activation_date, next_check, last_checked, operation_type, action_data)
|
||||
VALUES ($1, $2, $3, now(), $4, $5)
|
||||
RETURNING *;
|
||||
`;
|
||||
try {
|
||||
const values = [task.iccid, task.activation_date, task.next_check, task.operation_type, JSON.stringify(task.action_data)];
|
||||
const res: QueryResult<PauseCancelTask> = await this.pgClient.query(sql, values);
|
||||
return {
|
||||
data: res.rows[0]
|
||||
}
|
||||
} catch (e) {
|
||||
return {
|
||||
error: (e as AxiosError).message
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Se ha vuelto a comprobar la tarea pero sigue en test
|
||||
*/
|
||||
public async updateTask(updateData: UpdatePauseCancelTaskDTO): Promise<Result<string, PauseCancelTask>> {
|
||||
|
||||
const sql = `
|
||||
UPDATE pause_cancel_tasks
|
||||
SET last_checked = now(), next_check = $1
|
||||
WHERE id = $2
|
||||
RETURNING *;
|
||||
`;
|
||||
try {
|
||||
const res = await this.pgClient.query<PauseCancelTask>(sql, [updateData.next_check, updateData.id]);
|
||||
return {
|
||||
data: res.rows[0]
|
||||
}
|
||||
} catch (e) {
|
||||
return {
|
||||
error: (e as AxiosError).message
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* La tarea ha termiando bien o mal
|
||||
*/
|
||||
public async finishTask(finishData: FinishPauseCancelTaskDTO) {
|
||||
const sql = `
|
||||
UPDATE pause_cancel_tasks
|
||||
SET completed_date = NOW(), error = $1
|
||||
WHERE id = $2
|
||||
RETURNING *;
|
||||
`;
|
||||
|
||||
try {
|
||||
const res = await this.pgClient.query(sql, [finishData.error, finishData.id]);
|
||||
return {
|
||||
data: res.rows[0]
|
||||
}
|
||||
} catch (e) {
|
||||
return {
|
||||
error: (e as AxiosError).message
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -53,7 +53,7 @@
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"test": "node --import tsx --test ./**/*.test.ts",
|
||||
"dev": "tsx watch index.ts",
|
||||
"build": "tsc --build && yarn tsc-alias -p tsconfig.json && cp .env package.json ../../dist/packages/sim-consumidor-objenious/",
|
||||
"start": "node ../../dist/packages/sim-consumidor-objenious/index.js",
|
||||
@@ -68,7 +68,6 @@
|
||||
"cors": "*",
|
||||
"dotenv": "*",
|
||||
"express": "*",
|
||||
"sim-consumidor-objenious": "sim-consumidor-objenious:*",
|
||||
"sim-shared": "sim-shared:*",
|
||||
"typescript": "*"
|
||||
},
|
||||
|
||||
127
packages/sim-entrada-eventos/aplication/Order.controller.ts
Normal file
127
packages/sim-entrada-eventos/aplication/Order.controller.ts
Normal file
@@ -0,0 +1,127 @@
|
||||
import { BodyValidator } from "sim-shared/aplication/BodyValidator.js"
|
||||
import { OrderUsecases } from "./Order.usecases.js"
|
||||
import { Request, Response } from "express"
|
||||
import { PaginationArgs } from "#domain/common.js"
|
||||
import { idValidator, uuidValidator } from "./httpValidators.js"
|
||||
|
||||
export class OrderController {
|
||||
private orderUseCases: OrderUsecases
|
||||
|
||||
constructor(args: {
|
||||
orderUseCases: OrderUsecases
|
||||
}) {
|
||||
this.orderUseCases = args.orderUseCases
|
||||
}
|
||||
|
||||
public getById() {
|
||||
return this.controllerGenerator<{ id: number }, { id: number }>({
|
||||
validator: idValidator,
|
||||
useCase: this.orderUseCases.getById(),
|
||||
onError: (data, error) => { console.error(error) },
|
||||
onSuccess: (data) => console.log(data)
|
||||
})
|
||||
}
|
||||
|
||||
public getPending() {
|
||||
return this.controllerGenerator<PaginationArgs, PaginationArgs>({
|
||||
validator: undefined,
|
||||
useCase: this.orderUseCases.getPending(),
|
||||
onError: (data, error) => { console.error(error) },
|
||||
onSuccess: (data) => console.log(data)
|
||||
})
|
||||
}
|
||||
|
||||
public getByQueueId() {
|
||||
return this.controllerGenerator<{ correlation_id: string }, { correlation_id: string }>({
|
||||
validator: uuidValidator,
|
||||
useCase: this.orderUseCases.getByQueueId(),
|
||||
onError: (data, error) => { console.error(error) },
|
||||
onSuccess: (data) => console.log(data)
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* TODO:
|
||||
* - En proceso de validacion, tiene varios problemas
|
||||
* - Está copiado, planteado inyectarlo
|
||||
* - Map para la respuesta?
|
||||
*
|
||||
* Abstrae el proceso de
|
||||
* Peticion -> validacion del body -> map del body -> useCase -> OK/ERR
|
||||
*
|
||||
* <O> Representa el dato original
|
||||
* <P> Representa el dato después del mapeo
|
||||
*/
|
||||
public controllerGenerator<O extends object, P extends object>(args: {
|
||||
validator?: BodyValidator<O>,
|
||||
mapBody?: (body: O) => P,
|
||||
useCase: (args: P) => Promise<any>,
|
||||
onError: (args: O | P, error: string) => void,
|
||||
onSuccess: (args: P) => void,
|
||||
}) {
|
||||
return async (req: Request, res: Response) => {
|
||||
//scketchy
|
||||
const body = { ...req.body, ...req.params }
|
||||
|
||||
// 1. Validacion del body
|
||||
try {
|
||||
if (args.validator != undefined)
|
||||
args.validator.validate(body)
|
||||
} catch (e) {
|
||||
if (args.onError != undefined) args.onError(body, e as string)
|
||||
res.status(422).json({
|
||||
errors: {
|
||||
msg: e
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// 2. Transformacion del body
|
||||
let data: P = body;
|
||||
try {
|
||||
if (args.mapBody != undefined)
|
||||
data = args.mapBody(body)
|
||||
} catch (e) {
|
||||
res.status(422).json({
|
||||
errors: {
|
||||
msg: "Error parseando el body: " + e
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// 3. Aplicacion del UseCase
|
||||
try {
|
||||
const usecaseResult = await args.useCase(data)
|
||||
|
||||
// 4.1 Se devuelve el caso de exito pero no encontrado
|
||||
if (usecaseResult.data == undefined && usecaseResult.error == undefined) {
|
||||
res.status(404).json(usecaseResult).send()
|
||||
args.onSuccess(data)
|
||||
return;
|
||||
}
|
||||
|
||||
// 4.2 Caso de error controlado desde el caso de uso
|
||||
if (usecaseResult.error != undefined) {
|
||||
res.status(500).json(usecaseResult).send()
|
||||
return;
|
||||
}
|
||||
|
||||
// 4.2 Se devuelve al usuario el caso de exito de encontrado
|
||||
res.status(200).json(
|
||||
usecaseResult
|
||||
).send()
|
||||
args.onSuccess(data)
|
||||
} catch (err) {
|
||||
// 4.3 Error del caso de uso
|
||||
res.status(500).json({
|
||||
errors: {
|
||||
msg: "Error general:" + err
|
||||
}
|
||||
}).send()
|
||||
return;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
39
packages/sim-entrada-eventos/aplication/Order.usecases.ts
Normal file
39
packages/sim-entrada-eventos/aplication/Order.usecases.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import { PaginationArgs } from "#domain/common.js";
|
||||
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js";
|
||||
|
||||
|
||||
export class OrderUsecases {
|
||||
private orderRepository: OrderRepository;
|
||||
constructor(args: {
|
||||
orderRepository: OrderRepository
|
||||
}
|
||||
) {
|
||||
this.orderRepository = args.orderRepository
|
||||
}
|
||||
|
||||
public getById() {
|
||||
return async (args: {
|
||||
id: number
|
||||
}) => {
|
||||
const order = await this.orderRepository.getOrderById(args)
|
||||
return order
|
||||
}
|
||||
}
|
||||
|
||||
public getByQueueId() {
|
||||
return async (args: {
|
||||
correlation_id: string
|
||||
}) => {
|
||||
const order = await this.orderRepository.getOrderByQueueId(args)
|
||||
return order
|
||||
}
|
||||
}
|
||||
|
||||
public getPending() {
|
||||
return async (args: PaginationArgs & {
|
||||
}) => {
|
||||
return await this.orderRepository.getPendingOrders(args)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,164 +1,195 @@
|
||||
import { Request, Response } from "express"
|
||||
import { SimUsecases } from "./Sim.usecases.js"
|
||||
import { activationValidator, iccidValidator } from "./httpValidators.js"
|
||||
import { companyFromIccid } from "#domain/companies.js"
|
||||
import { BodyValidator } from "sim-shared/aplication/BodyValidator.js"
|
||||
import { tryCatch } from "packages/sim-shared/domain/Result.js"
|
||||
|
||||
// Partiendo del caracter 3 2 de pais + 2 de compañia
|
||||
// Metiendolo a la BDD podria ser mas dinamico pero perderia
|
||||
// tiempo de query
|
||||
// Puede que esté bien crear un endpoint para administrarlo
|
||||
const COMPAÑIASICCID = new Map<string, string>(
|
||||
[
|
||||
["3490", "alai"],
|
||||
["3510", "nos"],
|
||||
["3320", "objenious"]
|
||||
])
|
||||
|
||||
export class SimController {
|
||||
private simUseCases: SimUsecases
|
||||
|
||||
constructor(args: {
|
||||
simUseCases: SimUsecases
|
||||
simUseCases: SimUsecases,
|
||||
}) {
|
||||
this.simUseCases = args.simUseCases
|
||||
|
||||
this.activation = this.activation.bind(this)
|
||||
}
|
||||
|
||||
public preactivation() {
|
||||
/**
|
||||
* TODO:
|
||||
* En proceso, tiene varios problemas
|
||||
*
|
||||
* Abstrae el proceso de
|
||||
* Peticion -> validacion del body -> map del body -> useCase -> OK/ERR
|
||||
*
|
||||
* <O> Representa el dato original
|
||||
* <P> Representa el dato después del mapeo
|
||||
*/
|
||||
public controllerGenerator<O extends Object, P extends Object>(args: {
|
||||
validator?: BodyValidator<O>,
|
||||
mapBody?: (body: O) => P,
|
||||
useCase: (args: P) => Promise<any>,
|
||||
onError: (args: O | P, error: string) => void,
|
||||
onSuccess: (args: P) => void,
|
||||
}) {
|
||||
return async (req: Request, res: Response) => {
|
||||
const valido = this.validateBody(req.body, res)
|
||||
if (valido == false) return;
|
||||
const body = req.body
|
||||
// 1. Validacion del body
|
||||
if (args.validator != undefined) {
|
||||
const validationResult = args.validator.validate(body)
|
||||
if (validationResult.error != undefined) {
|
||||
res.status(422).json({
|
||||
errors: {
|
||||
...validationResult.error
|
||||
}
|
||||
})
|
||||
args.onError(body, validationResult.error.msg)
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
const { iccid } = req.body
|
||||
const compañia = this.compañiaFromIccid(iccid)
|
||||
|
||||
if (compañia == undefined) {
|
||||
res.status(500).json({
|
||||
// 2. Transformacion del body
|
||||
// TODO: sustituir el try cach
|
||||
let data: P = body;
|
||||
try {
|
||||
if (args.mapBody != undefined)
|
||||
data = args.mapBody(body)
|
||||
} catch (e) {
|
||||
res.status(422).json({
|
||||
errors: {
|
||||
msg: "El iccid no pertenece a una compañia conocida"
|
||||
msg: "Error parseando el body: " + e
|
||||
}
|
||||
})
|
||||
return;
|
||||
args.onError(body, String(e))
|
||||
return 1;
|
||||
}
|
||||
|
||||
// 3. Aplicacion del UseCase
|
||||
// TODO: todos los use cases tienen que pasar a devolver un Result<>
|
||||
const usecaseResult = await args.useCase(data) // no deberia hacer falta el trycatch
|
||||
|
||||
try {
|
||||
await this.simUseCases.preActivation({ iccid, compañia })
|
||||
|
||||
res.status(200).json({
|
||||
iccid: iccid,
|
||||
operation: "activation"
|
||||
}).send()
|
||||
} catch (err) {
|
||||
console.error("Error activando la sim ", req.body)
|
||||
// 4. Casos de error del usecase
|
||||
if (usecaseResult.error != undefined) {
|
||||
// 4.1 Error del caso de uso
|
||||
res.status(500).json({
|
||||
errors: {
|
||||
msg: "Error general de activation"
|
||||
...usecaseResult.error
|
||||
}
|
||||
}).send()
|
||||
return;
|
||||
args.onError(body, usecaseResult.error ?? "Error indefinido")
|
||||
return 1;
|
||||
}
|
||||
|
||||
// 5. Se devuelve al usuario el caso de exito
|
||||
res.status(200).json(
|
||||
usecaseResult.data
|
||||
).send()
|
||||
args.onSuccess(usecaseResult.data)
|
||||
return 0;
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
public test() {
|
||||
return this.controllerGenerator<{ iccid: string, offer: string }, { iccid: string }>({
|
||||
validator: iccidValidator,
|
||||
useCase: (args) => this.simUseCases.test(args),
|
||||
onError: (data, error) => console.error(error),
|
||||
onSuccess: (data) => {
|
||||
console.log("OK", data)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
public preactivation() {
|
||||
return this.controllerGenerator<{ iccid: string, offer: string }, { iccid: string, offer: string, compañia: string }>({
|
||||
validator: activationValidator,
|
||||
mapBody: (b) => {
|
||||
const { iccid, offer } = b
|
||||
const compañia = companyFromIccid(iccid)
|
||||
return { iccid, compañia, offer }
|
||||
},
|
||||
useCase: (args) => this.simUseCases.preActivation(args),
|
||||
onError: (d, e) => console.error("[x] Error preactivation: ", d, e),
|
||||
onSuccess: console.log
|
||||
})
|
||||
}
|
||||
|
||||
public activation() {
|
||||
return async (req: Request, res: Response) => {
|
||||
const valido = this.validateBody(req.body, res)
|
||||
|
||||
if (valido == false) return; // Si no es valido ya se ha enviado el error
|
||||
|
||||
const { iccid, offer } = req.body
|
||||
|
||||
const compañia = this.compañiaFromIccid(iccid)
|
||||
|
||||
if (compañia == undefined) {
|
||||
res.status(500).json({
|
||||
errors: {
|
||||
msg: "El iccid no pertenece a una compañia conocida"
|
||||
}
|
||||
})
|
||||
return;
|
||||
}
|
||||
return this.controllerGenerator<{ iccid: string, offer: string }, { iccid: string, offer: string, compañia: string }>({
|
||||
validator: activationValidator,
|
||||
mapBody: (b) => {
|
||||
const { iccid, offer } = b
|
||||
const compañia = companyFromIccid(iccid)
|
||||
return { iccid, compañia, offer }
|
||||
},
|
||||
useCase: (args) => this.simUseCases.activation(args),
|
||||
onError: (d, e) => console.error("[x] Error activacion: ", d, e),
|
||||
onSuccess: console.log
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
await this.simUseCases.activation({ iccid, compañia, offer })
|
||||
|
||||
res.status(200).json({
|
||||
iccid: iccid,
|
||||
operation: "activation"
|
||||
}).send()
|
||||
} catch (err) {
|
||||
console.error("Error activando la sim ", req.body)
|
||||
res.status(500).json({
|
||||
errors: {
|
||||
msg: "Error general de activation"
|
||||
}
|
||||
}).send()
|
||||
return;
|
||||
}
|
||||
}
|
||||
public reActivation() {
|
||||
return this.controllerGenerator<{ iccid: string, offer: string }, { iccid: string, offer: string, compañia: string }>({
|
||||
validator: iccidValidator,
|
||||
mapBody: (b) => {
|
||||
const { iccid, offer } = b
|
||||
const compañia = companyFromIccid(iccid)
|
||||
return { iccid, compañia, offer }
|
||||
},
|
||||
useCase: (args) => this.simUseCases.reActivation(args),
|
||||
onError: (d, e) => console.error("[x] Error reactivacion: ", d, e),
|
||||
onSuccess: console.log
|
||||
})
|
||||
}
|
||||
|
||||
public cancelation() {
|
||||
return async (req: Request, res: Response) => {
|
||||
const valido = this.validateBody(req.body, res)
|
||||
|
||||
if (valido == false) return; // Si no es valido ya se ha enviado el error
|
||||
|
||||
const { iccid } = req.body
|
||||
const compañia = this.compañiaFromIccid(iccid)
|
||||
|
||||
try {
|
||||
await this.simUseCases.cancelation({ iccid, compañia })
|
||||
res.status(200).json({
|
||||
iccid: iccid,
|
||||
operation: "cancelation"
|
||||
})
|
||||
} catch (err) {
|
||||
console.error("Error cancelando la sim ", req.body)
|
||||
res.status(500).json({
|
||||
errors: {
|
||||
msg: "Error general de cancelacion"
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
return this.controllerGenerator<{ iccid: string }, { iccid: string, compañia: string }>({
|
||||
validator: iccidValidator,
|
||||
mapBody: (b) => {
|
||||
const { iccid } = b
|
||||
const compañia = companyFromIccid(iccid)
|
||||
return { iccid, compañia }
|
||||
},
|
||||
useCase: (args) => this.simUseCases.cancelation(args),
|
||||
// TODO: Meter en los mensajes el nombre de la operacion
|
||||
onError: (d, e) => console.error("[x] Error cancelacion: ", d, e),
|
||||
onSuccess: console.log
|
||||
})
|
||||
}
|
||||
|
||||
public pause() {
|
||||
return async (req: Request, res: Response) => {
|
||||
const valido = this.validateBody(req.body, res)
|
||||
return this.controllerGenerator<{ iccid: string }, { iccid: string, compañia: string }>({
|
||||
validator: iccidValidator,
|
||||
mapBody: (b) => {
|
||||
const { iccid } = b
|
||||
const compañia = companyFromIccid(iccid)
|
||||
return { iccid, compañia }
|
||||
},
|
||||
useCase: (args) => this.simUseCases.pause(args),
|
||||
onError: (d, e) => console.error("[x] Error pausa: ", d, e),
|
||||
onSuccess: console.log
|
||||
})
|
||||
|
||||
if (valido == false) return; // Si no es valido ya se ha enviado el error
|
||||
|
||||
const { iccid } = req.body
|
||||
const compañia = this.compañiaFromIccid(iccid)
|
||||
|
||||
try {
|
||||
await this.simUseCases.pause({ iccid, compañia })
|
||||
res.status(200).json({
|
||||
iccid: iccid,
|
||||
operation: "cancelation"
|
||||
})
|
||||
} catch (err) {
|
||||
console.error("Error pausando la sim ", req.body)
|
||||
res.status(500).json({
|
||||
errors: {
|
||||
msg: "Error pausando la sim"
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public free() {
|
||||
return async (req: Request, res: Response) => {
|
||||
const valido = this.validateBody(req.body, res)
|
||||
|
||||
if (valido == false) return; // Si no es valido ya se ha enviado el error
|
||||
try {
|
||||
iccidValidator.validate(req.body)
|
||||
} catch (e) {
|
||||
res.status(422).json({
|
||||
errors: {
|
||||
msg: e
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const { iccid } = req.body
|
||||
const compañia = this.compañiaFromIccid(iccid)
|
||||
const compañia = companyFromIccid(iccid)
|
||||
|
||||
try {
|
||||
await this.simUseCases.cancelation({ iccid, compañia })
|
||||
@@ -178,14 +209,19 @@ export class SimController {
|
||||
}
|
||||
|
||||
public save() {
|
||||
|
||||
return async (req: Request, res: Response) => {
|
||||
const valido = this.validateBody(req.body, res)
|
||||
|
||||
if (valido == false) return; // Si no es valido ya se ha enviado el error
|
||||
try {
|
||||
iccidValidator.validate(req.body)
|
||||
} catch (e) {
|
||||
res.status(422).json({
|
||||
errors: {
|
||||
msg: e
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const { iccid } = req.body
|
||||
const compañia = this.compañiaFromIccid(iccid)
|
||||
const compañia = companyFromIccid(iccid)
|
||||
|
||||
try {
|
||||
await this.simUseCases.cancelation({ iccid, compañia })
|
||||
@@ -203,40 +239,4 @@ export class SimController {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Ad-hoc request-body check: verifies that `iccid` is present.
 * On failure responds 400 with a field -> message error map and returns
 * false; the caller must stop processing. Returns true when acceptable.
 * NOTE(review): only presence is checked here — length/company validation
 * appears to live in the BodyValidator instances; confirm.
 */
private validateBody(body: any, res: Response) {
    const { iccid } = body
    let errors = {}
    let valid = true

    if (iccid == undefined) {
        res.status(400)

        // accumulate per-field messages so one response can carry them all
        errors = {
            ...errors,
            iccid: "El iccid es undefined"
        }
        valid = false
    }

    // send the accumulated errors in a single response if anything failed
    if (valid == false) {
        res.json({
            errors: errors
        })
    }

    return valid;
}
|
||||
|
||||
/**
 * Derives the owning company from a full iccid: characters 3-6 encode
 * 2 country digits + 2 company digits, looked up in COMPAÑIASICCID.
 * @throws Error if no company is registered for that code
 */
private compañiaFromIccid(iccid: string) {
    const caracteresCommpañia = iccid.slice(2, 6)
    const compañia = COMPAÑIASICCID.get(caracteresCommpañia)

    if (compañia == undefined) throw new Error("El la compañia es desconocida: " + caracteresCommpañia)
    return compañia
}
|
||||
}
|
||||
|
||||
@@ -1,23 +1,89 @@
|
||||
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js";
|
||||
import { Result } from "sim-shared/domain/Result.js";
|
||||
import assert from "node:assert";
|
||||
import { EventBus } from "sim-shared/domain/EventBus.port";
|
||||
import { SimEvents } from "sim-shared/domain/SimEvents";
|
||||
import { uuidv7 } from "uuidv7";
|
||||
import { CreateOrderDTO, OrderTracking, OrderType, OrderTypeOptions } from "sim-shared/domain/Order.js";
|
||||
|
||||
/**
|
||||
* TODO:
|
||||
* - Conexion con la BDD
|
||||
* - Conexion con RabbitMQ
|
||||
* - Pasar a clase cuando existan las conexiones
|
||||
* Casos de uso de tarjetas sim. Garantiza que todos los metodos usan el mismo bus de mensajes
|
||||
* y repositorio de registro de las ordenes.
|
||||
*/
|
||||
export class SimUsecases {
|
||||
private eventBus: EventBus
|
||||
private eventBus: EventBus;
|
||||
private orderRepository: OrderRepository;
|
||||
|
||||
constructor(args: {
|
||||
eventBus: EventBus
|
||||
eventBus: EventBus,
|
||||
orderRepository: OrderRepository
|
||||
}
|
||||
) {
|
||||
this.eventBus = args.eventBus
|
||||
this.orderRepository = args.orderRepository
|
||||
}
|
||||
|
||||
/**
|
||||
* Añade un id de mensaje (correlation_id en la base de datos) a los mensajes que van a entrar en la cola
|
||||
*/
|
||||
private addMessage_id(event: SimEvents.general): SimEvents.general & { headers: { message_id: string } } {
|
||||
const uuid = uuidv7()
|
||||
return {
|
||||
...event,
|
||||
headers: {
|
||||
...event.headers,
|
||||
message_id: uuid
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Persists the event as an order row keyed by headers.message_id (stored as
 * correlation_id). The order type is taken from the third segment of the
 * routing key ("sim.<company>.<type>").
 * <T> is the type of the order payload.
 * Returns an error Result (never throws) when the event has no message_id
 * header or its type is not a registered OrderType.
 */
private async saveOrder<T extends any>(event: SimEvents.general): Promise<Result<string, OrderTracking<T>>> {
    if (event.headers?.message_id == undefined) {
        return <Result<string, any>>{
            error: "El evento no tiene una cabecera message_id definido"
        }
    }

    // third routing-key segment is the order type; fall back to "unknown"
    const orderType = (event.key.split(".")[2] as OrderType ?? "unknown")

    // Considering switching unrecognised types to "unknown" instead of rejecting
    if (!OrderTypeOptions.has(orderType)) {
        return <Result<string, any>>{
            error: `El evento no tiene un tipo valido: ${orderType} no existe como tipo valido`
        }
    }

    const order: CreateOrderDTO = {
        correlation_id: event.headers.message_id,
        order_type: orderType,
        routing_key: event.key,
        payload: event
    }

    const result = await this.orderRepository.createOrder<T>(order)
    return result;
}
|
||||
|
||||
/**
 * Smoke-test use case: publishes a `sim.test.unknown` event and records it
 * as an order. Returns the published event, including its generated
 * message_id header.
 */
async test(args: { iccid: string }) {
    assert(args.iccid != undefined)
    const event = <SimEvents.general>{
        key: `sim.test.unknown`,
        payload: {
            iccid: args.iccid
        }
    }
    const eventWithId = this.addMessage_id(event)

    // Fix: the publish result was bound to an unused local (`publish`)
    await this.eventBus.publish([eventWithId])
    // NOTE(review): saveOrder errors are silently ignored here — acceptable
    // for a test hook, but confirm that is intentional
    await this.saveOrder(eventWithId)
    return eventWithId
}
|
||||
|
||||
/**
|
||||
* TODO:
|
||||
* Crea una nueva sim de la que no se tenia registro anteriormente
|
||||
* Si ya existia se modifican los campos pero no se hace un cambio
|
||||
* de estado.
|
||||
@@ -34,8 +100,8 @@ export class SimUsecases {
|
||||
return this.eventBus.publish([activationEvent])
|
||||
}
|
||||
|
||||
async activation(args: { iccid: string, compañia: string, offer: string }) {
|
||||
|
||||
async activation(args: { iccid: string, compañia: string, offer: string }):
|
||||
Promise<Result<string, { iccid: string, message_id: string, operation: "activation" }>> {
|
||||
const activationEvent = <SimEvents.activation>{
|
||||
key: `sim.${args.compañia}.activate`,
|
||||
payload: {
|
||||
@@ -43,11 +109,59 @@ export class SimUsecases {
|
||||
offer: args.offer
|
||||
}
|
||||
}
|
||||
console.log("[d] Activation ", activationEvent)
|
||||
return this.eventBus.publish([activationEvent])
|
||||
const activationWithId = this.addMessage_id(activationEvent)
|
||||
console.log("[d] Activation ", activationWithId)
|
||||
await this.eventBus.publish([activationWithId])
|
||||
const createdOrder = await this.saveOrder<SimEvents.activation>(activationWithId)
|
||||
|
||||
if (createdOrder.error != undefined) {
|
||||
console.error(createdOrder.error)
|
||||
return {
|
||||
error: createdOrder.error
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
data: {
|
||||
iccid: args.iccid,
|
||||
operation: "activation",
|
||||
message_id: createdOrder.data?.correlation_id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async preActivation(args: { iccid: string, compañia: string }) {
|
||||
async reActivation(args: { iccid: string, compañia: string, offer: string }):
|
||||
Promise<Result<string, { iccid: string, message_id: string, operation: "reactivate" }>> {
|
||||
const activationEvent = <SimEvents.activation>{
|
||||
key: `sim.${args.compañia}.reactivate`,
|
||||
payload: {
|
||||
iccid: args.iccid,
|
||||
offer: args.offer
|
||||
}
|
||||
}
|
||||
const activationWithId = this.addMessage_id(activationEvent)
|
||||
console.log("[d] Reactivation ", activationWithId)
|
||||
await this.eventBus.publish([activationWithId])
|
||||
const createdOrder = await this.saveOrder<SimEvents.reActivation>(activationWithId)
|
||||
|
||||
if (createdOrder.error != undefined) {
|
||||
console.error(createdOrder.error)
|
||||
return {
|
||||
error: createdOrder.error
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
data: {
|
||||
iccid: args.iccid,
|
||||
operation: "reactivate",
|
||||
message_id: createdOrder.data?.correlation_id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async preActivation(args: { iccid: string, compañia: string }):
|
||||
Promise<Result<string, { iccid: string, message_id: string, operation: "preactivation" }>> {
|
||||
|
||||
const preActivationEvent = <SimEvents.preActivation>{
|
||||
key: `sim.${args.compañia}.preActivate`,
|
||||
@@ -56,22 +170,58 @@ export class SimUsecases {
|
||||
}
|
||||
}
|
||||
console.log("[d] Pre - activation ", preActivationEvent)
|
||||
return this.eventBus.publish([preActivationEvent])
|
||||
await this.eventBus.publish([preActivationEvent])
|
||||
const preactivationWithId = this.addMessage_id(preActivationEvent)
|
||||
const createdOrder = await this.saveOrder<SimEvents.preActivation>(preactivationWithId)
|
||||
if (createdOrder.error != undefined) {
|
||||
console.error(createdOrder.error)
|
||||
return {
|
||||
error: createdOrder.error
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
data: {
|
||||
iccid: args.iccid,
|
||||
operation: "preactivation",
|
||||
message_id: createdOrder.data?.correlation_id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Para objenious es terminate
|
||||
*/
|
||||
async cancelation(args: { iccid: string, compañia: string }) {
|
||||
async cancelation(args: { iccid: string, compañia: string }):
|
||||
Promise<Result<string, { iccid: string, message_id: string, operation: "cancelation" }>> {
|
||||
|
||||
const activationEvent = <SimEvents.cancel>{
|
||||
const cancelationEvent = <SimEvents.cancel>{
|
||||
key: `sim.${args.compañia}.cancel`,
|
||||
payload: {
|
||||
iccid: args.iccid
|
||||
}
|
||||
}
|
||||
console.log("[d] Cancelation ", activationEvent)
|
||||
return this.eventBus.publish([activationEvent])
|
||||
|
||||
const cancelationWithId = this.addMessage_id(cancelationEvent)
|
||||
console.log("[d] Cancelation ", cancelationWithId)
|
||||
|
||||
await this.eventBus.publish([cancelationWithId])
|
||||
const savedOrder = await this.saveOrder(cancelationWithId)
|
||||
|
||||
if (savedOrder.error != undefined) {
|
||||
console.error(savedOrder.error)
|
||||
return {
|
||||
error: savedOrder.error
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
data: {
|
||||
iccid: args.iccid,
|
||||
message_id: savedOrder.data.correlation_id,
|
||||
operation: "cancelation"
|
||||
}
|
||||
}
|
||||
}
|
||||
// alias por si acaso
|
||||
public terminate = this.cancelation;
|
||||
@@ -79,16 +229,37 @@ export class SimUsecases {
|
||||
/**
|
||||
* alias de bloquear / suspender en objenious
|
||||
*/
|
||||
async pause(args: { iccid: string, compañia: string }) {
|
||||
const cancelationEvent = <SimEvents.pause>{
|
||||
async pause(args: { iccid: string, compañia: string }):
|
||||
Promise<Result<string, { iccid: string, message_id: string, operation: "pause" }>> {
|
||||
const pauseEvent = <SimEvents.pause>{
|
||||
key: `sim.${args.compañia}.pause`,
|
||||
payload: {
|
||||
iccid: args.iccid
|
||||
}
|
||||
}
|
||||
|
||||
return this.eventBus.publish([cancelationEvent])
|
||||
const pauseWithId = this.addMessage_id(pauseEvent)
|
||||
console.log("[d] Pause", pauseWithId)
|
||||
await this.eventBus.publish([pauseWithId])
|
||||
//await this.saveOrder(pauseWithId)
|
||||
const savedOrder = await this.saveOrder<SimEvents.pause>(pauseWithId)
|
||||
|
||||
if (savedOrder.error != undefined) {
|
||||
console.error(savedOrder.error)
|
||||
return {
|
||||
error: savedOrder.error
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
data: {
|
||||
iccid: args.iccid,
|
||||
message_id: savedOrder.data.correlation_id,
|
||||
operation: "pause"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async free(args: { iccid: string, compañia: string }) {
|
||||
const cancelationEvent = <SimEvents.free>{
|
||||
key: `sim.${args.compañia}.free`,
|
||||
|
||||
@@ -0,0 +1,39 @@
|
||||
import { describe, it } from "node:test";
|
||||
import { iccidValidator } from "./httpValidators.js";
|
||||
import assert from "node:assert";
|
||||
|
||||
describe("test validators", () => {
|
||||
it("should validate 19 char iccid", () => {
|
||||
const validBody = {
|
||||
iccid: "8933201125068886692"
|
||||
}
|
||||
const res = iccidValidator.validate(validBody)
|
||||
assert(res.error == undefined)
|
||||
}),
|
||||
|
||||
// TODO: Nada de esto es valido, a partir de ahora los validadores no lanzan excepcion sino Result
|
||||
it("shouldnt validate empty string iccid", () => {
|
||||
const validBody = {
|
||||
iccid: ""
|
||||
}
|
||||
|
||||
assert
|
||||
.throws(() => iccidValidator.validate(validBody), { message: "La longitud del iccid es incorrecta debera ser de 19 caracteres" })
|
||||
|
||||
}),
|
||||
|
||||
it("shouldnt validate >19 char iccid", () => {
|
||||
const validBody = {
|
||||
iccid: "893320112506888669212345"
|
||||
}
|
||||
assert
|
||||
.throws(() => iccidValidator.validate(validBody), { message: "La longitud del iccid es incorrecta debera ser de 19 caracteres" })
|
||||
}),
|
||||
it("shouldnt validate <19 char iccid", () => {
|
||||
const validBody = {
|
||||
iccid: "8933201125"
|
||||
}
|
||||
assert
|
||||
.throws(() => iccidValidator.validate(validBody), { message: "La longitud del iccid es incorrecta debera ser de 19 caracteres" })
|
||||
})
|
||||
})
|
||||
84
packages/sim-entrada-eventos/aplication/httpValidators.ts
Normal file
84
packages/sim-entrada-eventos/aplication/httpValidators.ts
Normal file
@@ -0,0 +1,84 @@
|
||||
import { companyFromIccid } from "#domain/companies.js";
|
||||
import { BodyValidator, Validator } from "sim-shared/aplication/BodyValidator.js";
|
||||
|
||||
const offers = new Map([
|
||||
["mensual", "SAVEFAMILY1"],
|
||||
["anual", "SAVEFAMILY2"],
|
||||
["SAVEFAMILY1", "SAVEFAMILY1"],
|
||||
["SAVEFAMILY2", "SAVEFAMILY2"],
|
||||
])
|
||||
|
||||
const iccidLongitudValidator = <Validator<{ iccid: string }>>{
|
||||
field: "iccid",
|
||||
errorMsg: "La longitud del iccid es incorrecta debera ser de 19 caracteres",
|
||||
validationFunc: (a: { iccid: string }) => a.iccid.length == 19,
|
||||
}
|
||||
|
||||
const iccidRequired = <Validator<{ iccid: string }>>{
|
||||
field: "iccid",
|
||||
errorMsg: "El iccid debe estara definido",
|
||||
validationFunc: (a: { iccid: string }) => a.iccid != undefined,
|
||||
}
|
||||
|
||||
const iccidWithValidCompany = <Validator<{ iccid: string }>>{
|
||||
field: "iccid",
|
||||
errorMsg: "El iccid no corresponde a una compañia registrada",
|
||||
validationFunc: (a: { iccid: string }) => companyFromIccid(a.iccid) != undefined,
|
||||
}
|
||||
|
||||
const offerExists = <Validator<{ offer: string }>>{
|
||||
field: "offer",
|
||||
errorMsg: "La oferta introducida no es valida",
|
||||
validationFunc: (a: { offer: string }) => offers.has(a.offer),
|
||||
}
|
||||
|
||||
const isUuidv7 = <Validator<{ correlation_id?: string }>>{
|
||||
field: "correlation_id",
|
||||
errorMsg: "El uuid no es un uuidv7 valido",
|
||||
validationFunc: (a) => a.correlation_id != undefined && a.correlation_id.length < 36
|
||||
}
|
||||
|
||||
const definedId = <Validator<{ id?: number }>>{
|
||||
field: "id",
|
||||
errorMsg: "El id no se ha definido",
|
||||
validationFunc: (e) => e.id != undefined
|
||||
}
|
||||
|
||||
const isIntegerId = <Validator<{ id?: number }>>{
|
||||
field: "id",
|
||||
errorMsg: "El id no se ha definido",
|
||||
validationFunc: (e) => Number.isInteger(e.id)
|
||||
}
|
||||
|
||||
const validNumericId = <Validator<{ id?: number }>>{
|
||||
field: "id",
|
||||
errorMsg: "El id introducido no es un numero >= 0",
|
||||
validationFunc: (e) => e.id! >= 0
|
||||
}
|
||||
|
||||
export const activationValidator = new BodyValidator<{ iccid: string, offer: string }>(
|
||||
[
|
||||
iccidRequired,
|
||||
iccidLongitudValidator,
|
||||
iccidWithValidCompany,
|
||||
offerExists,
|
||||
]
|
||||
)
|
||||
|
||||
export const iccidValidator = new BodyValidator<{ iccid: string }>(
|
||||
[
|
||||
iccidRequired,
|
||||
iccidLongitudValidator,
|
||||
iccidWithValidCompany,
|
||||
]
|
||||
)
|
||||
|
||||
export const uuidValidator = new BodyValidator<{ correlation_id?: string }>([
|
||||
isUuidv7
|
||||
])
|
||||
|
||||
export const idValidator = new BodyValidator<{ id?: number }>([
|
||||
definedId,
|
||||
isIntegerId,
|
||||
validNumericId
|
||||
])
|
||||
@@ -1,7 +1,8 @@
|
||||
import { loadEnvFile } from "node:process";
|
||||
import path from "node:path";
|
||||
|
||||
loadEnvFile(path.join(import.meta.dirname, "../../../../.env"))
|
||||
|
||||
loadEnvFile(path.join("../../.env")) // Global
|
||||
|
||||
export const env = {
|
||||
ENVIRONMENT: process.env.ENVIORMENT,
|
||||
@@ -12,8 +13,8 @@ export const env = {
|
||||
POSTGRES_HOST: process.env.POSTGRES_HOST,
|
||||
POSTGRES_DATABASE: process.env.POSTGRES_DATABASE,
|
||||
RABBITMQ_HOST: String(process.env.RABBITMQ_HOST ?? "localhost"),
|
||||
RABBITMQ_USER: String(process.env.RABBITMQ_USER ?? "guest"),
|
||||
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD ?? "guest"),
|
||||
RABBITMQ_USER: String(process.env.RABBITMQ_USER ?? "test"),
|
||||
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD ?? "test"),
|
||||
RABBITMQ_EXCHANGE: String(process.env.RABBITMQ_EXCHANGE ?? "/"),
|
||||
RABBITMQ_PORT: parseInt(process.env.RABBITMQ_PORT ?? "5672"),
|
||||
RABBITMQ_MODULENAME: process.env.MODULENAME,
|
||||
@@ -21,4 +22,5 @@ export const env = {
|
||||
RABBITMQ_SECURE: process.env.RABBITMQ_SECURE,
|
||||
RABBITMQ_RETRY_INTERVAL: process.env.RABBITMQ_INTERVAL,
|
||||
RABBITMQ_VHOST: String(process.env.RABBITMQ_VHOST),
|
||||
CONNECTIONS_URL: String(process.env.CONNECTIONS_URL)
|
||||
};
|
||||
|
||||
18
packages/sim-entrada-eventos/config/postgreConfig.ts
Normal file
18
packages/sim-entrada-eventos/config/postgreConfig.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import { Pool } from 'pg';
|
||||
import { PgClient } from 'sim-shared/infrastructure/PgClient.js'
|
||||
import { env } from './env/index.js';
|
||||
|
||||
// Database connection configuration. It should be shared by every service,
// but until everything is unified each service builds its own pool.
export const pgPool = new Pool({
    user: env.POSTGRES_USER,
    host: env.POSTGRES_HOST,
    database: env.POSTGRES_DATABASE,
    password: env.POSTGRES_PASSWORD,
    // falls back to the default Postgres port when POSTGRES_PORT is unset or not numeric
    port: Number(env.POSTGRES_PORT) || 5432,
});

// Thin client wrapper used by the repositories; shares the pool above.
export const postgresClient = new PgClient({
    pool: pgPool
})
|
||||
6
packages/sim-entrada-eventos/domain/common.ts
Normal file
6
packages/sim-entrada-eventos/domain/common.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
|
||||
/**
 * Common pagination arguments for list queries.
 * NOTE(review): `offset` and `start` appear to overlap — confirm which one
 * the repositories actually consume.
 */
export type PaginationArgs = {
    limit?: number,
    offset?: number,
    start?: number
}
|
||||
22
packages/sim-entrada-eventos/domain/companies.ts
Normal file
22
packages/sim-entrada-eventos/domain/companies.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
// Keyed on iccid characters 3-6: 2 country digits + 2 company digits.
// Kept in code rather than the database to avoid a query per request;
// an admin endpoint could make it dynamic later.
export const COMPANYICCID = new Map<string, string>(
    [
        ["3490", "alai"],
        ["3510", "nos"],
        ["3320", "objenious"]
    ])

/**
 * Derives the owning company from a full iccid via COMPANYICCID.
 * @throws Error if no company is registered for the extracted code
 */
export function companyFromIccid(iccid: string) {
    const caracteresCommpañia = iccid.slice(2, 6)
    const compañia = COMPANYICCID.get(caracteresCommpañia)

    if (compañia == undefined) throw new Error("El la compañia es desconocida: " + caracteresCommpañia)
    return compañia
}
|
||||
@@ -1,8 +1,11 @@
|
||||
import express from "express"
|
||||
import cors from 'cors';
|
||||
import path from 'path';
|
||||
import { simRoutes } from "./infrastructure/simRoutes.http.js"
|
||||
import { rabbitmqEventBus } from '#config/eventBusConfig.js';
|
||||
import { env } from "#config/env/index.js"
|
||||
import { orderRoutes } from "#adapters/orderRoutes.http.js";
|
||||
import { connectionsRoutes } from "#adapters/simconnectionsRoutes.js";
|
||||
|
||||
const PORT = env.API_PORT
|
||||
const HOSTNAME = "0.0.0.0"
|
||||
@@ -24,6 +27,10 @@ app.use(express.json());
|
||||
app.use(express.urlencoded({ extended: true }));
|
||||
|
||||
app.use("/sim", simRoutes)
|
||||
app.use("/simconnections", connectionsRoutes)
|
||||
app.use("/orders", orderRoutes)
|
||||
|
||||
app.use("/docs", express.static(path.join(process.cwd(), '../../docs')))
|
||||
|
||||
app.get("/health", (req, res) => {
|
||||
res.status(200).json({ status: "ok" })
|
||||
|
||||
@@ -0,0 +1,40 @@
|
||||
/**
|
||||
* Rutas para consultar el estado de los order
|
||||
*/
|
||||
|
||||
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"
|
||||
import { Router } from "express"
|
||||
import { postgresClient } from '#config/postgreConfig.js';
|
||||
import { OrderController } from "../aplication/Order.controller.js";
|
||||
import { OrderUsecases } from "../aplication/Order.usecases.js";
|
||||
|
||||
const orderRoutes = Router()
|
||||
// orderRepository no se trata como singleton
|
||||
const orderRepository = new OrderRepository(postgresClient)
|
||||
const orderUseCases = new OrderUsecases({
|
||||
orderRepository: orderRepository
|
||||
})
|
||||
const orderController = new OrderController({
|
||||
orderUseCases: orderUseCases
|
||||
})
|
||||
|
||||
/**
|
||||
* Todas las orders, o un resumen, admite filtros
|
||||
* por:
|
||||
* - status
|
||||
* - fecha inicio
|
||||
* - fecha fin
|
||||
* - pendientes
|
||||
* */
|
||||
orderRoutes.get("/", (req, res) => { res.send("ok") })
|
||||
|
||||
orderRoutes.get("/message_id/:correlation_id", orderController.getByQueueId())
|
||||
|
||||
/** Operaciones pendientes */
|
||||
orderRoutes.get("/pending", orderController.getPending())
|
||||
|
||||
/** Order por id (uuid del mensaje) */
|
||||
orderRoutes.get("/:id", orderController.getById())
|
||||
|
||||
export { orderRoutes }
|
||||
|
||||
@@ -2,22 +2,28 @@ import { rabbitmqEventBus } from '#config/eventBusConfig.js';
|
||||
import { SimUsecases } from '../aplication/Sim.usecases.js';
|
||||
import { SimController } from '../aplication/Sim.controller.js';
|
||||
import { Router } from 'express';
|
||||
import { OrderRepository } from 'sim-shared/infrastructure/OrderRepository.js';
|
||||
import { postgresClient } from '#config/postgreConfig.js';
|
||||
|
||||
const simRoutes = Router()
|
||||
const orderRepository = new OrderRepository(postgresClient)
|
||||
|
||||
const simUseCases = new SimUsecases({
|
||||
eventBus: rabbitmqEventBus
|
||||
eventBus: rabbitmqEventBus,
|
||||
orderRepository: orderRepository
|
||||
})
|
||||
|
||||
const simController = new SimController({
|
||||
simUseCases: simUseCases
|
||||
})
|
||||
|
||||
// TODO: status de todos los proyectos
|
||||
simRoutes.get("/status", () => { })
|
||||
|
||||
simRoutes.post("/save", simController.save())
|
||||
|
||||
simRoutes.post("/activate", simController.activation())
|
||||
simRoutes.post("/reActivate", simController.reActivation())
|
||||
|
||||
simRoutes.post("/preActivate", simController.preactivation())
|
||||
|
||||
@@ -25,7 +31,10 @@ simRoutes.post("/pause", simController.pause())
|
||||
|
||||
simRoutes.post("/cancel", simController.cancelation())
|
||||
|
||||
simRoutes.post("/test", simController.test())
|
||||
|
||||
// Proceso especifico de ALAI para liberar sims canceladas
|
||||
simRoutes.post("/free", simController.free())
|
||||
|
||||
|
||||
export { simRoutes }
|
||||
|
||||
@@ -0,0 +1,87 @@
|
||||
import { env } from "#config/env/index.js"
|
||||
import { Router } from "express"
|
||||
import { ClientRequest, IncomingMessage } from "http"
|
||||
import { createProxyMiddleware } from "http-proxy-middleware"
|
||||
import { Request } from "express"
|
||||
|
||||
export const connectionsRoutes = Router()
|
||||
|
||||
const CONNECTIONS_URL = env.CONNECTIONS_URL// TODO: Meter al ENV
|
||||
//const CONNECTIONS_URL = "http://sf-nfc-server.savefamilygps.net"
|
||||
|
||||
console.log("CONNURL: ", CONNECTIONS_URL)
|
||||
|
||||
connectionsRoutes.use("", createProxyMiddleware({
|
||||
target: CONNECTIONS_URL,
|
||||
changeOrigin: true,
|
||||
pathRewrite: {
|
||||
'^/': "/simconnections/"
|
||||
},
|
||||
on: {
|
||||
proxyReq: (proxyReq: ClientRequest, req: Request) => {
|
||||
const protocol = req.protocol;
|
||||
const host = req.get('host');
|
||||
const originalFullUrl = `${protocol}://${host}${req.originalUrl}`;
|
||||
const destinationFullUrl = `${CONNECTIONS_URL}${proxyReq.path}`;
|
||||
/*
|
||||
constnsole.log('──────────────────────────────────────────────────');
|
||||
console.log(`[PROXY_DEBUG]`);
|
||||
console.log(` ENTRADA: ${originalFullUrl}`);
|
||||
console.log(` MÉTODO : ${req.method}`);
|
||||
console.log(` DESTINO: ${destinationFullUrl}`);
|
||||
console.log('──────────────────────────────────────────────────');
|
||||
*/
|
||||
console.log(`[Proxy Req]: ${req.method} ${req.url} -> ${proxyReq.path}`);
|
||||
},
|
||||
proxyRes: (proxyRes, req, res) => {
|
||||
console.log(`[Proxy Res] Status: ${proxyRes.statusCode} desde ${req.url}`);
|
||||
},
|
||||
error: (err, req, res) => {
|
||||
console.error('[Proxy Error]:', err);
|
||||
|
||||
// Validamos que 'res' tenga el método 'status' (típico de Express Response)
|
||||
if ('status' in res) {
|
||||
//@ts-ignore
|
||||
res.status(500).json({ message: 'Error interno en el Gateway' });
|
||||
}
|
||||
},
|
||||
}
|
||||
}))
|
||||
|
||||
|
||||
|
||||
// Rutas
|
||||
/**
|
||||
connectionsRoutes.post('/simconnections/alai/preactivate',);
|
||||
connectionsRoutes.get('/simconnections/alai/pause',);
|
||||
connectionsRoutes.post('/simconnections/alai/terminate',);
|
||||
connectionsRoutes.get('/simconnections/alai/pauseByPhone',);
|
||||
connectionsRoutes.get('/simconnections/alai/active',);
|
||||
connectionsRoutes.get('/simconnections/alai/change_orderid',);
|
||||
connectionsRoutes.get('/simconnections/alai/select',);
|
||||
connectionsRoutes.get('/simconnections/alai/select-iccid',);
|
||||
connectionsRoutes.get('/simconnections/alai/selectFromDb',);
|
||||
connectionsRoutes.get('/simconnections/alai/selectPage',);
|
||||
connectionsRoutes.post('/simconnections/alai/schedulePause',);
|
||||
connectionsRoutes.get('/simconnections/shopify/getbyWP',);
|
||||
connectionsRoutes.get('/simconnections/shopify/getbyWPS',);
|
||||
|
||||
///
|
||||
|
||||
connectionsRoutes.get('/simconnections/sim/associate',);
|
||||
connectionsRoutes.post('/simconnections/sim/search',);
|
||||
connectionsRoutes.post('/simconnections/sim/historic',);
|
||||
connectionsRoutes.post('/simconnections/sim/update',);
|
||||
|
||||
///
|
||||
|
||||
connectionsRoutes.post('/simconnections/nos/activate',);
|
||||
connectionsRoutes.get('/simconnections/nos/select',);
|
||||
connectionsRoutes.get('/simconnections/nos/selectPage',);
|
||||
|
||||
//Unificación
|
||||
connectionsRoutes.post('/simconnections/sim/active',); // True false
|
||||
connectionsRoutes.patch('/simconnections/sim/pause',);
|
||||
connectionsRoutes.get('/simconnections/sim/select',);
|
||||
connectionsRoutes.get('/simconnections/sim/select-phone',);
|
||||
**/
|
||||
@@ -13,12 +13,6 @@
|
||||
"types": "./config/*.ts",
|
||||
"default": "./config/*.js"
|
||||
},
|
||||
"#shared/*.js": {
|
||||
"default": "../sim-shared/*.js"
|
||||
},
|
||||
"#shared/*": {
|
||||
"default": "../sim-shared/*.js"
|
||||
},
|
||||
"#adapters/*.js": {
|
||||
"types": "./infrastructure/*.ts",
|
||||
"default": "./infrastructure/*.js"
|
||||
@@ -45,7 +39,7 @@
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"test": "node --import tsx --test ./**/*.test.ts",
|
||||
"build": "tsc --build && tsc-alias -p tsconfig.json && cp package.json ../../dist/packages/sim-entrada-eventos/",
|
||||
"dev": "tsx watch index.ts",
|
||||
"start": "node ../../dist/packages/sim-entrada-eventos/index.js"
|
||||
@@ -59,6 +53,8 @@
|
||||
"cors": "*",
|
||||
"dotenv": "*",
|
||||
"express": "*",
|
||||
"http-proxy-middleware": "^3.0.5",
|
||||
"sim-shared": "sim-shared:*",
|
||||
"typescript": "*"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
@@ -5,6 +5,8 @@ OBJ_CLI_ASSERTION=XOc7FtwXD8hUX2SFVX94XSty8wkOmChkwDNF09O_aIxPubMDdFUdCDCB4zpzSI
|
||||
OBJ_CLIENT_ID=savefamily_rest_ws
|
||||
OBJ_KID=xNfbMiyL1ORXGP8lElhcv8nVaG3EJKye4Lc1YoN3I1E
|
||||
OBJ_BASE_URL=https://api-getway.objenious.com/ws
|
||||
//OBJ_BASE_URL=https://api-getway.objenious.com/ws/test
|
||||
# OBJ_BASE_URL=https://api-getway.objenious.com/ws/test
|
||||
|
||||
NOTIFICATION_URL=https://api-paloma.com
|
||||
NOTIFICATION_URL="https://sf-sim-activation.savefamilygps.net/send-activation-mail"
|
||||
# NOTIFICATION_URL="localhost"
|
||||
SIM_ACTIVATION_API_KEY=9e48c4ac-1ab0-4397-b3f3-6c239200dfe6
|
||||
|
||||
24
packages/sim-objenious-cron/config/env/index.ts
vendored
24
packages/sim-objenious-cron/config/env/index.ts
vendored
@@ -1,5 +1,6 @@
|
||||
import { loadEnvFile } from "node:process";
|
||||
import path from "node:path";
|
||||
import assert from "node:assert";
|
||||
|
||||
loadEnvFile(path.join("../../.env")) // Global
|
||||
loadEnvFile(path.join("./.env")) // base
|
||||
@@ -12,9 +13,9 @@ export const env = {
|
||||
POSTGRES_HOST: process.env.POSTGRES_HOST,
|
||||
POSTGRES_DATABASE: process.env.POSTGRES_DATABASE,
|
||||
RABBITMQ_HOST: String(process.env.RABBITMQ_HOST ?? "localhost"),
|
||||
RABBITMQ_USER: String(process.env.RABBITMQ_USER ?? "guest"),
|
||||
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD ?? "guest"),
|
||||
RABBITMQ_EXCHANGE: String(process.env.RABBITMQ_EXCHANGE ?? "/"),
|
||||
RABBITMQ_USER: String(process.env.RABBITMQ_USER),
|
||||
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD),
|
||||
RABBITMQ_EXCHANGE: String(process.env.RABBITMQ_EXCHANGE),
|
||||
RABBITMQ_PORT: parseInt(process.env.RABBITMQ_PORT ?? "5672"),
|
||||
RABBITMQ_MODULENAME: process.env.MODULENAME,
|
||||
RABBITMQ_TTL: process.env.RABBITMQ_TTL,
|
||||
@@ -30,6 +31,21 @@ export const env = {
|
||||
OBJ_KID: String(process.env.OBJ_KID),
|
||||
OBJ_BASE_URL: String(process.env.OBJ_BASE_URL),
|
||||
|
||||
NOTIFICATION_URL: String(process.env.NOTIFICATION_URL)
|
||||
NOTIFICATION_URL: String(process.env.NOTIFICATION_URL ?? ""),
|
||||
SIM_ACTIVATION_API_KEY: String(process.env.SIM_ACTIVATION_API_KEY ?? "")
|
||||
};
|
||||
|
||||
// assert las partes criticas
|
||||
assert(env.RABBITMQ_PASSWORD != undefined)
|
||||
assert(env.RABBITMQ_USER != undefined)
|
||||
assert(env.SIM_ACTIVATION_API_KEY != "")
|
||||
assert(env.NOTIFICATION_URL != "")
|
||||
|
||||
if (env.ENVIRONMENT == "production") {
|
||||
assert(env.RABBITMQ_PASSWORD != "guest")
|
||||
assert(env.RABBITMQ_HOST != "localhost")
|
||||
}
|
||||
|
||||
|
||||
console.log("[i] verificado env")
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js"
|
||||
import { env } from "./env/index.js"
|
||||
import { JWTService } from "packages/sim-consumidor-objenious/aplication/JWT.service.js"
|
||||
import { jwtService } from "./jwtService.config.js"
|
||||
|
||||
|
||||
const OBJ_BASE_URL = env.OBJ_BASE_URL
|
||||
|
||||
@@ -9,5 +10,5 @@ export const httpInstance = new HttpClient({
|
||||
headers: {
|
||||
"content-type": " application/json; charset=utf-8"
|
||||
},
|
||||
jwtManager: new JWTService()
|
||||
jwtManager: jwtService
|
||||
})
|
||||
|
||||
20
packages/sim-objenious-cron/config/intranetPostgresConfig.ts
Normal file
20
packages/sim-objenious-cron/config/intranetPostgresConfig.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
/**
|
||||
* Cliente de postgres para la intranet. Se usa solo porque hace falta para el
|
||||
* volcado de datos, si se usa en mas partes algo estás haciendo mal.
|
||||
*/
|
||||
|
||||
import { Pool } from 'pg';
|
||||
import { PgClient } from 'sim-shared/infrastructure/PgClient.js'
|
||||
import { env } from './env/index.js';
|
||||
|
||||
export const pgPoolIntranet = new Pool({
|
||||
user: env.POSTGRES_USER,
|
||||
host: env.POSTGRES_HOST,
|
||||
database: "intranet",
|
||||
password: env.POSTGRES_PASSWORD,
|
||||
port: Number(env.POSTGRES_PORT) || 5432,
|
||||
});
|
||||
|
||||
export const postgresClientIntranet = new PgClient({
|
||||
pool: pgPoolIntranet
|
||||
})
|
||||
59
packages/sim-objenious-cron/config/jwtService.config.ts
Normal file
59
packages/sim-objenious-cron/config/jwtService.config.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import { GrantAccessRequestBody, JWTService } from "sim-shared/aplication/JWT.service.js"
|
||||
import { env } from "./env/index.js"
|
||||
import { JWTHeader } from "sim-shared/domain/JWT.js"
|
||||
|
||||
|
||||
const PRIVATE_KEY_PATH = env.OBJ_PEM_PATH
|
||||
|
||||
const GET_TOKEN_URL = "https://idp.docapost.io/auth/realms/GETWAY/protocol/openid-connect/token"
|
||||
const REFRESH_TOKEN_URL = GET_TOKEN_URL
|
||||
|
||||
const DEFAULT_BODY: GrantAccessRequestBody = {
|
||||
grant_type: "client_credentials",
|
||||
client_id: env.OBJ_CLIENT_ID,
|
||||
client_assertion_type: "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
|
||||
client_assertion: env.OBJ_CLI_ASSERTION
|
||||
}
|
||||
|
||||
|
||||
const DEFAULT_HEADERS = {
|
||||
"content-type": "application/x-www-form-urlencoded"
|
||||
}
|
||||
|
||||
const DEFAULT_HEADERS_JWT = {
|
||||
alg: "RS256",
|
||||
typ: "JWT",
|
||||
kid: env.OBJ_KID,
|
||||
}
|
||||
|
||||
const DEFAULT_DATA_JWT = {
|
||||
sub: env.OBJ_CLIENT_ID,
|
||||
iss: env.OBJ_CLIENT_ID,
|
||||
aud: "https://idp.docapost.io/auth/realms/GETWAY",
|
||||
jti: Date.now().toString(),
|
||||
|
||||
}
|
||||
|
||||
function addIATHeaders(authHeaders: Object) {
|
||||
const headers = <JWTHeader>{
|
||||
...authHeaders,
|
||||
sub: env.OBJ_CLIENT_ID,
|
||||
iss: env.OBJ_CLIENT_ID,
|
||||
aud: GET_TOKEN_URL,
|
||||
jti: Date.now().toString(),
|
||||
iat: Math.floor(Date.now() / 1000),
|
||||
exp: Math.floor(Date.now() / 1000) + 5 * 60,
|
||||
}
|
||||
return headers
|
||||
}
|
||||
|
||||
export const jwtService = new JWTService({
|
||||
transformJWTHeaders: addIATHeaders,
|
||||
defaultHeaders: DEFAULT_HEADERS,
|
||||
defaultBody: DEFAULT_BODY,
|
||||
defaultJWTHeaders: DEFAULT_HEADERS_JWT,
|
||||
defaultJWTPayload: DEFAULT_DATA_JWT,
|
||||
privateKeyPath: PRIVATE_KEY_PATH,
|
||||
tokenUrl: GET_TOKEN_URL,
|
||||
refreshTokenUrl: REFRESH_TOKEN_URL
|
||||
})
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Pool, QueryResult } from 'pg';
|
||||
import { Pool } from 'pg';
|
||||
import { PgClient } from 'sim-shared/infrastructure/PgClient.js'
|
||||
import { env } from './env/index.js';
|
||||
|
||||
|
||||
@@ -3,7 +3,14 @@ import { pgPool } from "./config/postgreConfig.js"
|
||||
import { PgClient } from "sim-shared/infrastructure/PgClient.js"
|
||||
import { httpInstance } from "./config/httpClient.config.js"
|
||||
import { CheckObjeniousRequests } from "./tasks/check_objenious_request.js"
|
||||
import { OperationsRepository } from "sim-shared/infrastructure/OperationRepository.js"
|
||||
import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js"
|
||||
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"
|
||||
import { TaskVolcadoLineas } from "./tasks/volcado_lineas.js"
|
||||
import { ObjeniousLinesRepository } from "./infranstructure/ObjeniousLinesRepository.js"
|
||||
import { postgresClientIntranet } from "./config/intranetPostgresConfig.js"
|
||||
import { PauseCancelTaskRepository } from "packages/sim-consumidor-objenious/infrastructure/PauseCancelTaskRepository.js"
|
||||
import { PauseTerminateTask } from "./tasks/check_pause_terminate.js"
|
||||
import { SimUseCases } from "packages/sim-consumidor-objenious/aplication/Sim.usecases.js"
|
||||
|
||||
async function startCron() {
|
||||
const commonSettings = {
|
||||
@@ -13,30 +20,73 @@ async function startCron() {
|
||||
|
||||
const httpClient = httpInstance
|
||||
const pgClient = new PgClient({ pool: pgPool })
|
||||
|
||||
console.log("[i] Comprobando conexion con la BDD ")
|
||||
await pgClient.checkDatabaseConnection()
|
||||
await pgClient.checkDatabaseConnection()
|
||||
const operationRepository = new OperationsRepository(pgClient)
|
||||
|
||||
const operationRepository = new ObjeniousOperationsRepository(
|
||||
httpClient,
|
||||
pgClient,
|
||||
)
|
||||
const orderRepository = new OrderRepository(pgClient)
|
||||
const objeniousLineRepository = new ObjeniousLinesRepository(postgresClientIntranet)
|
||||
|
||||
const objTask = new CheckObjeniousRequests(
|
||||
operationRepository,
|
||||
httpClient
|
||||
orderRepository,
|
||||
httpClient,
|
||||
)
|
||||
|
||||
const interval = setInterval(async () => {
|
||||
console.log("Updating...")
|
||||
await objTask.getPendingOperations()
|
||||
console.log("Update finished")
|
||||
}, 60 * 1000)
|
||||
/*
|
||||
const task = cron.createTask("* * * * *", async () => {
|
||||
}
|
||||
, {
|
||||
...commonSettings,
|
||||
name: "Test"
|
||||
})
|
||||
*/
|
||||
const objeniosRepo = new ObjeniousOperationsRepository(
|
||||
httpClient,
|
||||
pgClient
|
||||
)
|
||||
|
||||
const volcadoLineasTask = new TaskVolcadoLineas(
|
||||
objeniousLineRepository,
|
||||
objeniosRepo
|
||||
)
|
||||
|
||||
const pauseRepo = new PauseCancelTaskRepository(pgClient)
|
||||
const simUsecases = new SimUseCases({
|
||||
httpClient: httpClient,
|
||||
operationRepository: operationRepository,
|
||||
orderRepository: orderRepository,
|
||||
pauseRepository: pauseRepo
|
||||
})
|
||||
|
||||
const pauseTask = new PauseTerminateTask(
|
||||
objeniosRepo,
|
||||
pauseRepo,
|
||||
simUsecases,
|
||||
orderRepository
|
||||
)
|
||||
|
||||
await objTask.getPendingOperations()
|
||||
const PERIODO_PETICIONES = 10 * 60 * 1000
|
||||
const interval = setInterval(async () => {
|
||||
try {
|
||||
await objTask.getPendingOperations()
|
||||
} catch (e) {
|
||||
console.error("[x] Error de actualizacion de las lineas ")
|
||||
}
|
||||
}, PERIODO_PETICIONES)
|
||||
|
||||
const PERIODO_VOLCADO = 60 * 60 * 1000
|
||||
const volcadoInterval = setInterval(async () => {
|
||||
try {
|
||||
await volcadoLineasTask.loadLines()
|
||||
} catch (e) {
|
||||
console.error("[x] Volcado de lineas de Objenious Fallido", e)
|
||||
}
|
||||
}, PERIODO_VOLCADO)
|
||||
|
||||
await pauseTask.run()
|
||||
const PERIODO_CANCELACIONES = 60 * 60 * 1000;
|
||||
const clacelacionesInterval = setInterval(async () => {
|
||||
await pauseTask.run()
|
||||
}, PERIODO_CANCELACIONES)
|
||||
|
||||
//await objTask.getPendingOperations()
|
||||
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,59 @@
|
||||
import test, { after, before, describe } from "node:test";
|
||||
import { CreateObjeniousLineDTO } from "sim-shared/domain/objeniousLine.js";
|
||||
import { ObjeniousLinesRepository } from "./ObjeniousLinesRepository.js";
|
||||
import { postgrClient } from "../config/postgreConfig.js";
|
||||
import assert from "node:assert";
|
||||
|
||||
describe("Line insertion test", async () => {
|
||||
//const pgClient = postgreClientIntranet
|
||||
const pgClient = postgrClient // En prod hay que usar el de Intrantet para usar la otra base de datos
|
||||
const lineRepository = new ObjeniousLinesRepository(pgClient)
|
||||
const lineaTest: CreateObjeniousLineDTO = {
|
||||
simId: 1234,
|
||||
iccid: "9999999999999",
|
||||
msisdn: "34654674732",
|
||||
imei: "219789481293",
|
||||
imeiChangeDate: new Date(),
|
||||
offerCode: "SAVEFAMILY1",
|
||||
status: "ACTIVATED",
|
||||
preactivationDate: new Date(),
|
||||
activationDate: new Date(),
|
||||
commercialStatus: "test",
|
||||
commercialStatusDate: new Date(),
|
||||
billingStatus: "test",
|
||||
billingStatusChangeDate: new Date(),
|
||||
billingActivationDate: new Date(),
|
||||
createDate: new Date(),
|
||||
raw: { test: "test" } as any // Para este test no hace falta
|
||||
}
|
||||
|
||||
// Clean up before and after tests to ensure isolation
|
||||
const cleanup = async () => {
|
||||
await pgClient.query("DELETE FROM objenious_lines WHERE simId = 1234");
|
||||
};
|
||||
|
||||
before(async () => {
|
||||
await cleanup()
|
||||
})
|
||||
|
||||
after(async () => {
|
||||
await cleanup()
|
||||
})
|
||||
|
||||
test("Should insert new line", async () => {
|
||||
const res = await lineRepository.insertOrUpdate(lineaTest)
|
||||
assert.ok(res != undefined, "The line wasn't created")
|
||||
})
|
||||
|
||||
test("Should not update a line if the hash is the same", async () => {
|
||||
const res = await lineRepository.insertOrUpdate(lineaTest)
|
||||
assert.ok(res == undefined, "The line have been updated")
|
||||
})
|
||||
|
||||
test("Should update a line if the hash changes", async () => {
|
||||
const updated = structuredClone(lineaTest)
|
||||
lineaTest.billingActivationDate = new Date()
|
||||
const res = await lineRepository.insertOrUpdate(lineaTest)
|
||||
assert.ok(res != undefined, "The line have been updated")
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,112 @@
|
||||
/**
|
||||
* Repositorio para el volcado de lineas de objenious en intranet
|
||||
* solo para uso en el volcado.
|
||||
*/
|
||||
import { createHash } from "node:crypto";
|
||||
import { PoolClient } from "pg";
|
||||
import { CreateObjeniousLineDTO } from "sim-shared/domain/objeniousLine.js";
|
||||
import { PgClient } from "sim-shared/infrastructure/PgClient.js";
|
||||
|
||||
export class ObjeniousLinesRepository {
|
||||
constructor(
|
||||
private pgClient: PgClient
|
||||
) {
|
||||
}
|
||||
|
||||
private generateLineHash(data: CreateObjeniousLineDTO) {
|
||||
try {
|
||||
const lineStr = JSON.stringify(data)
|
||||
const hash = createHash("sha256").update(lineStr).digest("base64url")
|
||||
return hash
|
||||
} catch (e) {
|
||||
console.error("[x] Error generando el hash de la linea", data)
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
public async insertOrUpdate(data: CreateObjeniousLineDTO) {
|
||||
const query = `
|
||||
INSERT INTO objenious_lines (
|
||||
simId,
|
||||
iccid,
|
||||
msisdn,
|
||||
imei,
|
||||
imeiChangeDate,
|
||||
offerCode,
|
||||
status,
|
||||
preactivationDate,
|
||||
activationDate,
|
||||
commercialStatus,
|
||||
commercialStatusDate,
|
||||
billingStatus,
|
||||
billingStatusChangeDate,
|
||||
billingActivationDate,
|
||||
createDate,
|
||||
raw,
|
||||
hash
|
||||
) VALUES (
|
||||
$1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17
|
||||
)
|
||||
ON CONFLICT (simId)
|
||||
DO UPDATE SET
|
||||
iccid = EXCLUDED.iccid,
|
||||
msisdn = EXCLUDED.msisdn,
|
||||
imei = EXCLUDED.imei,
|
||||
imeiChangeDate = EXCLUDED.imeiChangeDate,
|
||||
offerCode = EXCLUDED.offerCode,
|
||||
status = EXCLUDED.status,
|
||||
preactivationDate = EXCLUDED.preactivationDate,
|
||||
activationDate = EXCLUDED.activationDate,
|
||||
commercialStatus = EXCLUDED.commercialStatus,
|
||||
commercialStatusDate = EXCLUDED.commercialStatusDate,
|
||||
billingStatus = EXCLUDED.billingStatus,
|
||||
billingStatusChangeDate = EXCLUDED.billingStatusChangeDate,
|
||||
billingActivationDate = EXCLUDED.billingActivationDate,
|
||||
raw = EXCLUDED.raw,
|
||||
hash = EXCLUDED.hash
|
||||
WHERE objenious_lines.hash IS DISTINCT FROM EXCLUDED.hash
|
||||
RETURNING id;
|
||||
`;
|
||||
|
||||
const lineHash = this.generateLineHash(data)
|
||||
|
||||
if (lineHash == undefined) {
|
||||
console.error("[x] Ignorando linea ", data)
|
||||
return;
|
||||
}
|
||||
|
||||
const values = [
|
||||
data.simId,
|
||||
data.iccid,
|
||||
data.msisdn,
|
||||
data.imei,
|
||||
data.imeiChangeDate,
|
||||
data.offerCode,
|
||||
data.status,
|
||||
data.preactivationDate,
|
||||
data.activationDate,
|
||||
data.commercialStatus,
|
||||
data.commercialStatusDate,
|
||||
data.billingStatus,
|
||||
data.billingStatusChangeDate,
|
||||
data.billingActivationDate,
|
||||
data.createDate || new Date(), // Default a ahora si no viene
|
||||
JSON.stringify(data.raw), // El driver de pg requiere string o el objeto directo para JSONB
|
||||
lineHash
|
||||
];
|
||||
|
||||
let client: PoolClient | undefined = undefined;
|
||||
try {
|
||||
client = await this.pgClient.connect();
|
||||
const res = await client.query<{ id: number }>(query, values);
|
||||
return res.rows[0];
|
||||
} catch (err) {
|
||||
console.error('Error en la inserción:', err);
|
||||
throw err;
|
||||
} finally {
|
||||
if (client != undefined) {
|
||||
client.release()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -5,20 +5,6 @@
|
||||
"description": "",
|
||||
"main": "index.ts",
|
||||
"imports": {
|
||||
"#config/*.js": {
|
||||
"types": "./config/*.ts",
|
||||
"default": "./config/*.js"
|
||||
},
|
||||
"#config/*": {
|
||||
"types": "./config/*.ts",
|
||||
"default": "./config/*.js"
|
||||
},
|
||||
"#shared/*.js": {
|
||||
"default": "../sim-shared/*.js"
|
||||
},
|
||||
"#shared/*": {
|
||||
"default": "../sim-shared/*.js"
|
||||
},
|
||||
"#adapters/*.js": {
|
||||
"types": "./infrastructure/*.ts",
|
||||
"default": "./infrastructure/*.js"
|
||||
@@ -45,8 +31,8 @@
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"build": "tsc --build && tsc-alias -p tsconfig.json && cp package.json ../../dist/packages/sim-objenious-cron/",
|
||||
"test": "node --import tsx --test ./**/*.test.ts",
|
||||
"build": "tsc --build && tsc-alias -p tsconfig.json && cp .env package.json ../../dist/packages/sim-objenious-cron/",
|
||||
"dev": "tsx watch index.ts",
|
||||
"start": "node ../../dist/packages/sim-objenious-cron/index.js"
|
||||
},
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
export const task = async () => console.log("Background " + new Date().toISOString())
|
||||
@@ -1,10 +1,14 @@
|
||||
import { env } from "#config/env/index.js";
|
||||
import { env } from "../config/env/index.js";
|
||||
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js";
|
||||
import axios from "axios";
|
||||
import { IOperationsRepository, Objenious, ObjeniousOperation, ObjeniousOperationChange, StatusEnum } from "sim-shared/domain/operationsRepository.port.js";
|
||||
import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js";
|
||||
import { ObjeniousOperationsRepository } from "packages/sim-shared/infrastructure/ObjeniousOperationRepository.js";
|
||||
|
||||
export class CheckObjeniousRequests {
|
||||
constructor(
|
||||
private readonly operationsRepository: IOperationsRepository,
|
||||
private readonly operationsRepository: ObjeniousOperationsRepository,
|
||||
private readonly orderRepository: OrderRepository,
|
||||
private readonly httpClient: HttpClient
|
||||
) {
|
||||
}
|
||||
@@ -13,43 +17,51 @@ export class CheckObjeniousRequests {
|
||||
* TODO: meter a una funcion a parte task con los 3 pasos
|
||||
*/
|
||||
public async getPendingOperations() {
|
||||
console.log("[i] Inicio revision de peticiones")
|
||||
// 1. Se obtienen todas las operaciones pendientes de la BDD
|
||||
const pendingOperations = await this.operationsRepository.getPendingOperations()
|
||||
|
||||
|
||||
if (pendingOperations.error != undefined) {
|
||||
throw new Error("Error obteniendo las tareas pendientes " + pendingOperations.error)
|
||||
}
|
||||
|
||||
if (pendingOperations.data == undefined || pendingOperations.data.length == 0) {
|
||||
//Nada pendiente
|
||||
console.log("[cron] No hay operaciones pendientes de Objenious")
|
||||
return;
|
||||
}
|
||||
|
||||
// 2. Clasificación de las tareas pendientes
|
||||
// Erroneas => no se les ha dado un request_id, no se pueden comprobar
|
||||
const erroneas = pendingOperations.data
|
||||
.filter((e) => e.request_id == undefined)
|
||||
|
||||
// Todas las validas
|
||||
const operacionesValidas = pendingOperations.data
|
||||
.filter((e) => e.request_id != undefined)
|
||||
|
||||
// Validas sin MassId
|
||||
const solicitarMassId = operacionesValidas
|
||||
.filter((e) => e.mass_action_id == undefined)
|
||||
|
||||
// Validas con MassId
|
||||
const consultarEstado = pendingOperations.data
|
||||
.filter(e => e.mass_action_id != undefined)
|
||||
// TODO: Validas sin/con massID que lleven mucho tiempo sin actualizarse
|
||||
|
||||
console.log("[cron] Solicitando mass id para", solicitarMassId.map(e => e.id))
|
||||
|
||||
const newMassActions = await this.getMassIdFromRequest(solicitarMassId)
|
||||
|
||||
const merged = [...newMassActions || [], ...consultarEstado]
|
||||
|
||||
console.log("[cron] Solicitando status para", merged.map(e => e.id))
|
||||
|
||||
const result = await this.getMassActionsStatus(merged)
|
||||
|
||||
console.log("[o] Revisión de eventos completa")
|
||||
}
|
||||
|
||||
/**
|
||||
* Para una lista de operaciones **con mass_action_id** se comprueba si han tenido alguna actualizacion
|
||||
* Devuelve el numero de operaciones comprobadas.
|
||||
* TODO: Esto va en un repositorio
|
||||
*/
|
||||
private async getMassActionsStatus(requestList: ObjeniousOperation[]) {
|
||||
if (requestList.length == 0) return;
|
||||
if (requestList.length == 0) return 0;
|
||||
|
||||
const operationsList = structuredClone(requestList)
|
||||
const PATH = "/actions/massActions/"
|
||||
@@ -90,8 +102,10 @@ export class CheckObjeniousRequests {
|
||||
// 2. Se comprueba si ha habido un cambio de estado
|
||||
const { id, status, info } = data
|
||||
|
||||
if (status != originalAction.objenious_status) {
|
||||
const hasStatusChanged = status != originalAction.objenious_status
|
||||
if (hasStatusChanged) {
|
||||
console.log("[cron] Actualizando", originalAction.id, originalAction.iccids, status)
|
||||
/** Status convertido al que se usa en la aplicacion */
|
||||
const uorStatus = this.mapStatus(status)
|
||||
const updateData: ObjeniousOperationChange = {
|
||||
operation_id: originalAction.id!,
|
||||
@@ -101,28 +115,43 @@ export class CheckObjeniousRequests {
|
||||
previous_status: originalAction.status
|
||||
}
|
||||
|
||||
originalAction.status = uorStatus;
|
||||
originalAction.objenious_status = status;
|
||||
originalAction.last_change_date = new Date().toISOString()
|
||||
console.log(" ----> Status", uorStatus)
|
||||
if (uorStatus /*== "finished"*/) {
|
||||
const updatedAction = structuredClone(originalAction)
|
||||
|
||||
updatedAction.status = uorStatus;
|
||||
updatedAction.objenious_status = status;
|
||||
updatedAction.last_change_date = new Date().toISOString()
|
||||
updatedAction.end_date = originalAction.last_change_date
|
||||
|
||||
if (uorStatus == "finished") {
|
||||
console.log(" ****> Status", uorStatus)
|
||||
const targetIccids = JSON.parse(originalAction.iccids || "[]") as string[]
|
||||
const targetIccids = originalAction.iccids
|
||||
const lineData = await this.getLineData(targetIccids)
|
||||
console.log("lineData", lineData.content[0])
|
||||
console.log("[i] lineData", lineData.content[0])
|
||||
const msisdn = lineData.content[0].identifier.msisdn
|
||||
|
||||
this.notifyFinalization({
|
||||
...originalAction,
|
||||
msisdn
|
||||
})
|
||||
.then(e => {
|
||||
console.log("Notificada la activacion de ", originalAction.iccids)
|
||||
})
|
||||
.catch(e => {
|
||||
console.error("Error enviando la activacion de ", originalAction)
|
||||
console.error(e)
|
||||
if (originalAction.correlation_id != undefined) {
|
||||
this.orderRepository.finishOrder({ correlation_id: originalAction.correlation_id })
|
||||
.then(e => console.log("[o] Finalizada order", e))
|
||||
.catch(e => {
|
||||
console.error("[x] Error finalizando la order ", e)
|
||||
console.error(e)
|
||||
})
|
||||
}
|
||||
|
||||
if (originalAction.operation == "activate") {
|
||||
this.notifyFinalization({
|
||||
...originalAction,
|
||||
msisdn
|
||||
})
|
||||
// TODO la accion no siempre es activacion!
|
||||
.then(e => {
|
||||
console.log("[o] Notificada la activacion de ", originalAction.iccids)
|
||||
})
|
||||
.catch(e => {
|
||||
console.error("[x] Error enviando la activacion de ", originalAction)
|
||||
console.error(e)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if (info != undefined) {
|
||||
@@ -130,12 +159,12 @@ export class CheckObjeniousRequests {
|
||||
}
|
||||
|
||||
try {
|
||||
console.log("Subiendo un update")
|
||||
console.log("[i] Subiendo un update")
|
||||
console.log(updateData)
|
||||
await this.operationsRepository.updateOperation(updateData)
|
||||
updated.push(originalAction)
|
||||
} catch (e) {
|
||||
console.error("Error actualizando el estado de ", originalAction, e)
|
||||
console.error("[x] Error actualizando el estado de ", originalAction, e)
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -149,14 +178,15 @@ export class CheckObjeniousRequests {
|
||||
// ! Importante las claves siempre en minuscula, los valores son cammelCase
|
||||
const equivalentMap = new Map<string, StatusEnum>([
|
||||
["en cours", "running"],
|
||||
["terminé", "finished"]
|
||||
["terminé", "finished"],
|
||||
["annulé", "finished"]
|
||||
])
|
||||
const res = equivalentMap.get(sanitizedStatus)
|
||||
if (res == undefined) return "running"
|
||||
return res
|
||||
}
|
||||
|
||||
private async getLineData(iccids: string[]) {
|
||||
private async getLineData(iccids: string) {
|
||||
const PATH = "/lines"
|
||||
|
||||
const req = this.httpClient.client.get(PATH, {
|
||||
@@ -187,7 +217,7 @@ export class CheckObjeniousRequests {
|
||||
const PATH = "/actions/requests/"
|
||||
const operationsList = structuredClone(requestList)
|
||||
|
||||
|
||||
// TODO: El for es gigantesco hay que simplificar partes
|
||||
for (const request of operationsList) {
|
||||
if (request.id == undefined) continue;
|
||||
|
||||
@@ -200,13 +230,50 @@ export class CheckObjeniousRequests {
|
||||
try {
|
||||
res = await req
|
||||
} catch (e) {
|
||||
console.error("Error comprobando el estado de ", request, e)
|
||||
//todo actualizar el estado para incluir el error
|
||||
console.error("[x] Error comprobando el estado de ", request, e)
|
||||
continue;
|
||||
}
|
||||
|
||||
// 2. Casos de error o id no generada
|
||||
if (res.data.massActionIds.length == 0) {
|
||||
// Si no hay es que *puede* que haya un problema o no se ha generado todavia
|
||||
const reports = res.data.actionRequestReports
|
||||
// Se entiende que no hay report ni id = está a la espera
|
||||
if (reports.length == 0) continue;
|
||||
|
||||
// ! Hay minimo un report -> se considera error y se para
|
||||
const updateData: ObjeniousOperationChange = {
|
||||
operation_id: request.id,
|
||||
new_status: "error",
|
||||
error: JSON.stringify(reports[0].actionRequestReportDataDTOs)
|
||||
}
|
||||
|
||||
const updateRes = await this.operationsRepository.updateOperation(updateData)
|
||||
if (updateRes.error != undefined) {
|
||||
console.error("[x] Error actualizando el estado de la operacion", updateData.error)
|
||||
}
|
||||
|
||||
if (request.correlation_id != undefined) {
|
||||
this.orderRepository.errorOrder({
|
||||
correlation_id: request.correlation_id,
|
||||
status: "failed",
|
||||
error: "MassId no obtenida",
|
||||
reason: "MassId no obtenida",
|
||||
stackTrace: JSON.stringify(reports[0].actionRequestReportDataDTOs)
|
||||
}).then(e => {
|
||||
if (e.error != undefined) {
|
||||
console.error("[x] Error actualizando el estado del Order con correlation_id: ", request.correlation_id)
|
||||
console.error(e.error)
|
||||
}
|
||||
}).catch(e => {
|
||||
console.error("[x] Error actualizando el estado del Order con correlation_id: ", request.correlation_id)
|
||||
})
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// 2. Modificacion del massId si ha habido un cambio
|
||||
const massActionId = res.data.massActionIds[0]
|
||||
// 3. Modificacion del massId si ha habido un cambio
|
||||
try {
|
||||
if (res.status == 200 && res.data != undefined && massActionId != undefined) {
|
||||
const updateData: ObjeniousOperationChange = {
|
||||
@@ -220,7 +287,7 @@ export class CheckObjeniousRequests {
|
||||
request.mass_action_id = String(massActionId)
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("Error actualizando el estado de ", request)
|
||||
console.log("[x] Error actualizando el estado de ", request)
|
||||
continue;
|
||||
}
|
||||
}
|
||||
@@ -234,12 +301,27 @@ export class CheckObjeniousRequests {
|
||||
* al servicio que manda los mails
|
||||
*/
|
||||
/**
 * Posts a finished operation to the notification (mail) service.
 *
 * Never throws: a failed notification must not abort the cron pass, so both
 * non-200 responses and transport errors are only logged.
 *
 * @param operation the completed operation, enriched with the line's msisdn.
 */
private async notifyFinalization(operation: ObjeniousOperation & { msisdn: string }) {
    console.log("[i] Enviando activacion a", env.NOTIFICATION_URL)
    console.log("[i] Operation", operation)
    // The receiver expects `iccids` as an array even for a single value.
    const req = axios.post(env.NOTIFICATION_URL, {
        ...operation,
        iccids: [operation.iccids]
    }, {
        headers: {
            "x-apikey-sim-activation": env.SIM_ACTIVATION_API_KEY
        }
    })
    try {
        const res = await req
        // NOTE(review): axios rejects on non-2xx by default, so this branch is
        // likely only reachable with a custom validateStatus — confirm config.
        if (res.status != 200) {
            console.error("[x] Error enviando el mail de confirmacion para ", operation, " status ", res.status, res.statusText)
        }
    } catch (e) {
        console.error("[x] Error enviando el mail de confirmacion para ", operation)
        console.error(e)
    }
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
191
packages/sim-objenious-cron/tasks/check_pause_terminate.ts
Normal file
191
packages/sim-objenious-cron/tasks/check_pause_terminate.ts
Normal file
@@ -0,0 +1,191 @@
|
||||
import { ObjeniousLine } from "sim-shared/domain/objeniousLine.js";
|
||||
import { PauseCancelTaskRepository } from "sim-consumidor-objenious/infrastructure/PauseCancelTaskRepository.js";
|
||||
import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js";
|
||||
import { SimUseCases } from "sim-consumidor-objenious/aplication/Sim.usecases.js";
|
||||
import { OrderRepository } from "packages/sim-shared/infrastructure/OrderRepository.js";
|
||||
|
||||
const logger =
|
||||
{
|
||||
log: (...data: any[]) => console.log("[i] [TaskPauseTerminate]", ...data),
|
||||
error: (...data: any[]) => console.error("[x] [TaskPauseTerminate] ", ...data),
|
||||
}
|
||||
|
||||
|
||||
export class PauseTerminateTask {
|
||||
constructor(
|
||||
private readonly objeniousRepo: ObjeniousOperationsRepository,
|
||||
private readonly pauseRepo: PauseCancelTaskRepository,
|
||||
private readonly simUsecases: SimUseCases,
|
||||
private readonly orderRepo: OrderRepository
|
||||
) {
|
||||
}
|
||||
|
||||
public async run() {
|
||||
const finError = (err: any) => {
|
||||
logger.error("Finalizado con errores proceso de comprobacion de lineas en pausa o canceladas")
|
||||
logger.error(err)
|
||||
}
|
||||
|
||||
const finExito = () => {
|
||||
logger.log("Finalizado con exito proceso de comprobacion de lineas en pausa o canceladas")
|
||||
}
|
||||
try {
|
||||
logger.log("Iniciando proceso de comprobacion de lineas en pausa o canceladas")
|
||||
|
||||
// 1. Se comprueba cuantas peticiones hay qye revisar
|
||||
const peticionesRevisar = await this.pauseRepo.getPending()
|
||||
|
||||
if (peticionesRevisar.error != undefined) {
|
||||
finError(peticionesRevisar.error)
|
||||
return 1;
|
||||
}
|
||||
|
||||
logger.log(`Se van a revisar ${peticionesRevisar.data?.length} peticiones`)
|
||||
if (peticionesRevisar.data == undefined || peticionesRevisar.data.length == 0) {
|
||||
finExito()
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
// 2. Se comprueba que alguna de las lineas haya dejado de estar en estado de test
|
||||
const iccids = peticionesRevisar.data.map(e => e.iccid)
|
||||
const lineasActualizadas: ObjeniousLine[] = []
|
||||
|
||||
const lineGenerator = this.objeniousRepo.getLinesByStatusAPI({
|
||||
iccids: iccids
|
||||
})
|
||||
|
||||
let lines = await lineGenerator.next()
|
||||
|
||||
if (lines.value.error != undefined || lines.value.data == undefined) {
|
||||
logger.error("Error cargando las lineas", lines.value.error)
|
||||
finError(lines.value.error)
|
||||
return 1;
|
||||
} else {
|
||||
lineasActualizadas.push(...lines.value.data)
|
||||
}
|
||||
|
||||
while (!lines.done) {
|
||||
if (lines.value.error != undefined || lines.value.data == undefined) {
|
||||
logger.error("Error cargando las lineas", lines.value.error)
|
||||
finError(lines.value.error)
|
||||
return 1;
|
||||
} else {
|
||||
lineasActualizadas.push(...lines.value.data)
|
||||
}
|
||||
|
||||
lines = await lineGenerator.next()
|
||||
}
|
||||
|
||||
console.log("Cargado: ", lineasActualizadas)
|
||||
|
||||
// 3. Se separan las lineas que se tienen que actualizar al no ser test
|
||||
// y las que se tienen que reencolar al ser test
|
||||
const lineasNoTest = lineasActualizadas.filter(e => e.status.billingStatus != "TEST")
|
||||
const lineasTest = lineasActualizadas.filter(e => e.status.billingStatus == "TEST")
|
||||
|
||||
// 4. Las lineas de test se reencolan
|
||||
// El proximo reintento es en 1 dia
|
||||
const proximoReintento = new Date()
|
||||
proximoReintento.setDate(new Date().getDate() + 1)
|
||||
|
||||
// 5. Reintentos en 1 dia
|
||||
for (const linea of lineasTest) {
|
||||
const lineaId = peticionesRevisar.data
|
||||
.find(e => e.iccid == linea.identifier.iccid)?.id
|
||||
|
||||
if (lineaId == undefined) continue; // Esto puede ser un problema si se generaliza
|
||||
|
||||
this.pauseRepo.updateTask({
|
||||
id: lineaId,
|
||||
next_check: proximoReintento
|
||||
})
|
||||
}
|
||||
|
||||
// 6. Operaciones de pausa/cancelacion definitiva
|
||||
for (const linea of lineasNoTest) {
|
||||
const operacion = peticionesRevisar.data
|
||||
.find(e => e.iccid == linea.identifier.iccid)
|
||||
|
||||
if (operacion == undefined) continue;
|
||||
const dueDate = new Date()
|
||||
dueDate.setMinutes(new Date().getMinutes() + 15)
|
||||
|
||||
const operacionTipo = operacion.operation_type
|
||||
const actionData = operacion.action_data
|
||||
const correlation_id = operacion.action_data.correlation_id
|
||||
actionData.dueDate = dueDate.toISOString()
|
||||
|
||||
switch (linea.status.billingStatus) {
|
||||
case "ACTIVATED":
|
||||
let result = null;
|
||||
|
||||
// Se termina el proceso aqui pero pasa a ser una operación de
|
||||
// objenious por lo que puede fallar y quedaria registrado en
|
||||
// la tabla objenious_operation
|
||||
switch (operacionTipo) {
|
||||
case "suspend":
|
||||
result = await this.simUsecases.suspend(actionData)()
|
||||
break;
|
||||
case "terminate":
|
||||
result = await this.simUsecases.terminate(actionData)()
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
if (result == undefined) {
|
||||
logger.error("Operacion desconocida", operacion)
|
||||
} else if (result?.error != undefined) {
|
||||
// error usecase
|
||||
logger.error(result.error)
|
||||
await this.pauseRepo.finishTask({
|
||||
id: operacion.id,
|
||||
error: result.error
|
||||
})
|
||||
if (correlation_id != undefined)
|
||||
await this.orderRepo.errorOrder({
|
||||
correlation_id: correlation_id,
|
||||
status: "dlx",
|
||||
reason: result.error
|
||||
})
|
||||
} else {
|
||||
// ok
|
||||
await this.pauseRepo.finishTask({ id: operacion.id })
|
||||
if (correlation_id != undefined)
|
||||
await this.orderRepo.finishOrder({ correlation_id })
|
||||
}
|
||||
|
||||
break;
|
||||
case "CANCELED":
|
||||
await this.pauseRepo.finishTask({
|
||||
id: operacion.id,
|
||||
error: "billingStatus is CANCELED"
|
||||
})
|
||||
if (correlation_id != undefined)
|
||||
await this.orderRepo.finishOrder({ correlation_id })
|
||||
break;
|
||||
case "SUSPENDED":
|
||||
await this.pauseRepo.finishTask({
|
||||
id: operacion.id,
|
||||
error: "billingStatus is SUSPENDED"
|
||||
})
|
||||
if (correlation_id != undefined)
|
||||
await this.orderRepo.finishOrder({ correlation_id })
|
||||
break;
|
||||
case "TEST":
|
||||
// No puede ser
|
||||
default:
|
||||
logger.error("billingStatus desconocido", linea.status.billingStatus)
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
finExito()
|
||||
} catch (e) {
|
||||
finError(e)
|
||||
}
|
||||
|
||||
return 0
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user