63 Commits

Author SHA1 Message Date
70bf73b0a4 Error query 2026-04-10 11:11:12 +02:00
e3849d8217 Archivos de migraciones 2026-04-09 12:48:36 +02:00
d9854a12a8 Version de migrate 2026-04-09 12:31:06 +02:00
48d387a8da Migraciones antes de lanzar start 2026-04-09 12:10:40 +02:00
93d3e13793 Merge pull request 'WEBINT-328-Pausas-cacelaciones' (#1) from WEBINT-328-Pausas-cacelaciones into main
Reviewed-on: #1
2026-04-09 10:03:47 +00:00
031f5d5cf0 nombre de columna con mayus 2026-04-09 11:59:06 +02:00
047669bab2 Acabados de corregir bugs 2026-04-09 11:53:49 +02:00
5ea5939e3a Bug de finaliazacion de tareas erroneas 2026-04-09 09:08:11 +02:00
7ff3f13af4 Funcionan las suspensiones 2026-04-08 17:37:47 +02:00
a9589f578b Solucionado cierrre de pool para test 2026-04-08 14:47:57 +02:00
a27e4b30d2 Cron completo y mejora de logs 2026-04-08 13:48:57 +02:00
4168949b9e Endpoint preparados 2026-04-08 10:08:54 +02:00
e6ff54a15d Usecases 2026-04-07 17:43:17 +02:00
3956797020 Las operaciones basicas del repositorio de pause/cancel funcionan y
tienen test
2026-04-07 15:40:19 +02:00
7d88359263 Refactor de jwt y base de la bdd de pausas-cancelaciones 2026-04-07 13:20:31 +02:00
1b6da651a6 Ajustado el periodo de comprobaciones 2026-03-27 12:47:10 +01:00
9b305f887f Test .env ajustado 2026-03-27 12:24:20 +01:00
9506b9e28e Error de nombre de activacion 2026-03-27 10:59:15 +01:00
61c0edca07 Logs del envio 2026-03-27 10:52:03 +01:00
9470b5605d Pribando el env 2026-03-27 10:50:03 +01:00
9d63d23754 Mejor gestion de errores para los order 2026-03-26 12:21:28 +01:00
a95655a2a6 Completada la tarea de volcado 2026-03-26 09:29:09 +01:00
025801a689 Repositorio de lineas funciona 2026-03-25 11:51:14 +01:00
28880c4d99 Lineas activas e insertar cada una 2026-03-24 17:27:52 +01:00
5bb3bc554b doc 2026-03-24 11:20:59 +01:00
cfb907b840 Copy en jenkins 2026-03-11 12:34:41 +01:00
d5d7953fd2 Endpoint para documentacion 2026-03-11 12:31:17 +01:00
96298aab25 Docs en HTML 2026-03-11 11:35:16 +01:00
c17cca1e81 Sobreescribia el registerSobreescribia el register 2026-03-10 10:43:56 +01:00
7264efcf79 Errata 2026-03-10 10:42:32 +01:00
8934bcd603 Copia yarnrc 2026-03-10 10:39:39 +01:00
bdd08dbc56 Copiar yarnrc a docker 2026-03-10 10:37:26 +01:00
7d47fde806 Solucionado problema db-migrate 2026-03-10 10:21:53 +01:00
ad207fb732 db-migrate 2026-03-10 09:34:17 +01:00
bd9081b5bc hardcodeado el customerAccountCode 2026-03-06 11:18:30 +01:00
a429e9d14a Errata customer 2026-03-06 11:13:47 +01:00
81eb986313 Error de tipado 2026-03-06 11:06:15 +01:00
58bedc42f1 Bug de correlation_id en las llamadas a objenious 2026-03-06 11:02:18 +01:00
b97f422261 Prod 2026-03-05 10:33:38 +01:00
7a7dc33724 Error de prod 2026-03-05 10:30:43 +01:00
7743bd1f0d Migraciones a mano de momento 2026-03-05 10:17:26 +01:00
2897d7aa3c Probando a añadir el registro desde jenkins 2026-03-05 10:10:12 +01:00
0fd7eafcf3 Eliminado el clone 2026-03-05 09:09:02 +01:00
71253d216e Registry local 2026-03-05 09:04:50 +01:00
aeea6cfefd Probando con clone 2026-03-04 16:49:07 +01:00
e8eb925834 Sin paso 2026-03-04 15:54:13 +01:00
7cf9cc60e6 Test jenkins 2026-03-04 15:52:43 +01:00
1e9818d430 Yarn lock 2026-03-04 14:03:11 +01:00
39c0e87758 Mejora de las orders y actualizacion docs 2026-03-04 13:51:24 +01:00
5771972e2a Revesriendo cambio del docker 2026-03-02 17:19:07 +01:00
ea13403dc3 Error https 2026-03-02 17:16:34 +01:00
8d9a9b84b8 Cambiando el lock a mano 2026-03-02 17:15:04 +01:00
9b92f3506b Ya no hace falta la eliminacion explicita 2026-03-02 16:57:46 +01:00
1798118f6b Sin yarn.lock que copiar 2026-03-02 16:51:12 +01:00
eba2b8c569 Ya con la eliminacion del lock 2026-03-02 16:48:18 +01:00
b6b2cf6cc8 El inmutable 2026-03-02 16:46:29 +01:00
a0faa2d105 Jenkins 2026-03-02 16:45:35 +01:00
d323f804fc No copiar el lock 2026-03-02 16:41:37 +01:00
978454754c Eliminado yarn lock 2026-03-02 16:29:35 +01:00
b6091b15da docker con clean del cache 2026-03-02 16:23:05 +01:00
a6794a061b Yarn install 2026-03-02 16:01:00 +01:00
fafea3ce04 http 2026-03-02 15:55:32 +01:00
992f639f35 Prueba con otra url para gitea 2026-03-02 15:38:42 +01:00
67 changed files with 2179 additions and 2350 deletions

10
.env
View File

@@ -5,8 +5,8 @@ RABBITMQ_PASSWORD=guest
ENVIORMENT=development ENVIORMENT=development
RABBITMQ_HOST=rabbitmq-sim-broker #RABBITMQ_HOST=rabbitmq-sim-broker
# RABBITMQ_HOST=localhost RABBITMQ_HOST=localhost
RABBITMQ_PORT=5672 RABBITMQ_PORT=5672
RABBITMQ_USER=guest RABBITMQ_USER=guest
RABBITMQ_PASSWORD=guest RABBITMQ_PASSWORD=guest
@@ -14,13 +14,13 @@ RABBITMQ_SECURE=false
RABBITMQ_VHOST=sim-vhost RABBITMQ_VHOST=sim-vhost
# Hay cosas que unificar de varios servicios # Hay cosas que unificar de varios servicios
POSTGRES_HOST=postgresql-sim #POSTGRES_HOST=postgresql-sim
# POSTGRES_HOST=localhost POSTGRES_HOST=localhost
POSTGRES_DB=postgres POSTGRES_DB=postgres
POSTGRES_DATABASE=postgres POSTGRES_DATABASE=postgres
POSTGRES_PORT=5433 POSTGRES_PORT=5433
POSTGRES_USER=postgres POSTGRES_USER=postgres
POSTGRES_PASSWORD=1234 POSTGRES_PASSWORD='1234'
# Para el postgres local para generar el script de resultado de migraciones # Para el postgres local para generar el script de resultado de migraciones
PGHOST=localhost PGHOST=localhost

View File

@@ -3,3 +3,9 @@ compressionLevel: mixed
enableGlobalCache: false enableGlobalCache: false
nodeLinker: node-modules nodeLinker: node-modules
npmScopes:
sf-alvar:
npmRegistryServer: "https://git.savefamilygps.net/api/packages/SaveFamily/npm/"
npmRegistryServer: "https://registry.npmjs.org/"

View File

@@ -1,10 +1,3 @@
#/bin/bash #/bin/bash
rm deployment/database/init.sql
# cat deployment/database/*.sql >deployment/database/init.sql
cp deployment/database/esquema_final* deployment/database/init.sql
# compatibilidad con postgresql < 17
sed -i '/\\restrict/d' deployment/database/init.sql
sed -i '/\\unrestrict/d' deployment/database/init.sql
docker compose -f deployment/local/docker/docker-compose.yaml --project-directory ./ build docker compose -f deployment/local/docker/docker-compose.yaml --project-directory ./ build

View File

@@ -1,7 +1,8 @@
# stage base para coordinar las fases de build y ejecucion # stage base para coordinar las fases de build y ejecucion
FROM node:22-alpine AS base FROM node:22-alpine AS base
WORKDIR /usr/local/app WORKDIR /usr/local/app
COPY ./package.json ./yarn.lock ./ COPY ./package.json ./
#COPY ./package.json ./yarn.lock ./
RUN corepack enable && \ RUN corepack enable && \
corepack prepare yarn@4.12.0 --activate corepack prepare yarn@4.12.0 --activate
# copia el codigo en general # copia el codigo en general

View File

@@ -0,0 +1,20 @@
CREATE table if not exists objenious_lines (
id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
simId BIGINT UNIQUE,
status TEXT,
iccid TEXT NOT NULL,
msisdn TEXT,
imei TEXT,
imeiChangeDate TIMESTAMPTZ,
offerCode TEXT,
preactivationDate TIMESTAMPTZ, -- No viene con hora
activationDate TIMESTAMPTZ,
commercialStatus TEXT,
commercialStatusDate TIMESTAMPTZ,
billingStatus TEXT,
billingStatusChangeDate TIMESTAMPTZ,
billingActivationDate TIMESTAMPTZ,
createDate TIMESTAMPTZ,
raw JSONB,
hash TEXT
)

View File

@@ -0,0 +1,32 @@
/**
* Para la tarea WEBINT-328-Pausas-cacelaciones.
* Almacena las pausas/cancelaciones que no se han podido hacer porque la linea esta en
* "Test"
*/
DO $$ BEGIN
CREATE TYPE SUSPENDTERMINATE AS ENUM ('suspend','terminate');
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
CREATE TABLE IF NOT EXISTS pause_cancel_tasks (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
iccid TEXT NOT NULL,
operation_type SUSPENDTERMINATE,
last_checked TIMESTAMPTZ, -- Última vez que se ha comprobado que no esté en test
activation_date TIMESTAMPTZ, -- Fecha de activacion para comprobar si ha pasdo un mes
next_check TIMESTAMPTZ, -- Si se ha comprobado se asignará la siguiente fecha de revision
completed_date TIMESTAMPTZ, -- Cuando se ha completado, para bien o mal.
error TEXT,
action_data JSONB -- datos de la operacion original.
);
-- Indice de las tareas que no han terminado
CREATE INDEX idx_pause_cancel_tasks_pending
ON pause_cancel_tasks (next_check)
WHERE completed_date IS NULL;

View File

@@ -6,13 +6,14 @@ WORKDIR /home/node/app
RUN corepack enable RUN corepack enable
COPY ./dist/packages ./packages COPY ./dist/packages ./packages
COPY ./.yarnrc.yml ./
COPY ./docs ./docs
# Para las migraciones
COPY ./deployment ./deployment
COPY ./package.json ./ COPY ./package.json ./
# Force node-modules linker (no .yarnrc.yml in build context) RUN yarn install
RUN echo 'nodeLinker: node-modules' > .yarnrc.yml
RUN yarn install
RUN mkdir -p dist && ln -sf ../packages dist/packages RUN mkdir -p dist && ln -sf ../packages dist/packages

View File

@@ -1,4 +1,6 @@
#!/bin/sh #!/bin/sh
cd /home cd /home
cd /home/node/app && yarn start cd /home/node/app
yarn migrate
yarn start

View File

@@ -22,7 +22,7 @@ pipeline {
} }
stage("🧱 Building") { stage("🧱 Building") {
steps { steps {
sh 'rm -rf dist/' sh 'rm -rf dist/'
sh 'yarn run build' sh 'yarn run build'
} }
} }
@@ -60,11 +60,15 @@ pipeline {
sourceFiles: "dist/**/*", sourceFiles: "dist/**/*",
excludes: "dist/**/node_modules/**" excludes: "dist/**/node_modules/**"
), ),
sshTransfer(
cleanRemote: false,
remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: "docs/**/*",
),
sshTransfer( sshTransfer(
cleanRemote: false, cleanRemote: false,
remoteDirectory: "$APP_REMOTE_PATH", remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: "deployment/database/**/*", sourceFiles: "deployment/database/**/*",
removePrefix: "deployment",
), ),
sshTransfer( sshTransfer(
cleanRemote: false, cleanRemote: false,
@@ -88,6 +92,11 @@ pipeline {
remoteDirectory: "$APP_REMOTE_PATH", remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: "package.json", sourceFiles: "package.json",
), ),
sshTransfer(
cleanRemote: false,
remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: ".yarnrc.yml",
),
sshTransfer( sshTransfer(
cleanRemote: false, cleanRemote: false,
execCommand: "sh $APP_REMOTE_PATH/rebuild.sh" execCommand: "sh $APP_REMOTE_PATH/rebuild.sh"

View File

@@ -14,6 +14,7 @@ COPY ./packages ./packages
COPY tsconfig*.json ./ COPY tsconfig*.json ./
COPY .env* ./ COPY .env* ./
COPY ./.yarnrc.yml ./ COPY ./.yarnrc.yml ./
COPY ./docs ./docs
COPY ./deployment/local/docker/start.sh ./ COPY ./deployment/local/docker/start.sh ./
# Copiar el archivo de migrations? porque ahora no creo que se esté lanzando nada # Copiar el archivo de migrations? porque ahora no creo que se esté lanzando nada
COPY ./deployment/database/migrations ./deployment/database/migrations COPY ./deployment/database/migrations ./deployment/database/migrations

View File

@@ -40,6 +40,9 @@ services:
- path: ./packages - path: ./packages
action: sync action: sync
target: /usr/local/app/packages target: /usr/local/app/packages
- path: ./docs
action: sync
target: /usr/local/app/docs
- path: ./package.json - path: ./package.json
action: rebuild action: rebuild
ports: ports:
@@ -72,7 +75,6 @@ services:
- "${POSTGRES_PORT}:${POSTGRES_PORT}" - "${POSTGRES_PORT}:${POSTGRES_PORT}"
volumes: volumes:
- ./sql-data/:/var/lib/postgres/data - ./sql-data/:/var/lib/postgres/data
- ./deployment/database/init.sql:/docker-entrypoint-initdb.d/init.sql
healthcheck: healthcheck:
test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"] test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"]
interval: 5s interval: 5s

File diff suppressed because one or more lines are too long

View File

@@ -11,7 +11,7 @@ post {
} }
body:form-urlencoded { body:form-urlencoded {
iccid: 8933201125065160331 iccid: 8933201125068890694
offer: SAVEFAMILY1 offer: SAVEFAMILY1
} }

View File

@@ -18,3 +18,38 @@ settings {
encodeUrl: true encodeUrl: true
timeout: 0 timeout: 0
} }
docs {
El endpoint recibe como body
```
{
iccid: string,
update_webhook?: string
}
```
`update_webhook` está en desarrollo, pero será donde se mande la actualizacion de la cancelación cuando haya una respuesta de la API externa.
Si la llamada tiene exito devuelve:
``` json
{
data: {
iccid: string,
message_id: string,
operation: "cancelation"
}
}
```
message_id se usará para la llamada /orders/message_id/}{message_id}
Si la llamada falla devolvera:
```json
{
errors: {
msg: string
... (campos extra de gestion del error)
}
}
```
}

16
docs/sim-api/Docs.bru Normal file
View File

@@ -0,0 +1,16 @@
meta {
name: Docs
type: http
seq: 12
}
get {
url: {{baseurl}}/docs/sim-api-documentation.html
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -15,7 +15,7 @@ params:query {
} }
body:form-urlencoded { body:form-urlencoded {
iccid: 8933201125065160414 iccid: 8933201125068886692
} }
settings { settings {

View File

@@ -0,0 +1,34 @@
docs {
Los endpoint tienen unos campos comunes de entrada:
```ts
{
iccid: string,
update_webhook?: string
}
```
`update_webhook` está en desarrollo, pero será donde se mande la actualizacion de la cancelación cuando haya una respuesta de la API externa.
Si la llamada tiene exito devuelve:
```ts
{
data: {
iccid: string,
message_id: string,
operation: string,
}
}
```
message_id se usará para la llamada /orders/message_id/}{message_id}
Si la llamada falla devolvera:
```ts
{
errors: {
msg: string
... (campos extra de gestion del error)
}
}
```
}

View File

@@ -1,3 +1,4 @@
vars { vars {
baseurl: http://localhost:3000 baseurl: http://localhost:3000
} }
color: #2E8A54

View File

@@ -1,3 +1,4 @@
vars { vars {
baseurl: https://sf-sims.savefamilygps.net baseurl: https://sf-sims.savefamilygps.net
} }
color: #CE4F3B

View File

@@ -5,16 +5,16 @@ meta {
} }
get { get {
url: https://api-getway.objenious.com/ws/lines?pageSize=10&identifier.identifierType=ICCID&identifier.identifiers=8933201125065160455 url: https://api-getway.objenious.com/ws/lines?pageSize=1000&simStatus=ACTIVATED
body: formUrlEncoded body: formUrlEncoded
auth: bearer auth: bearer
} }
params:query { params:query {
pageSize: 10 pageSize: 1000
identifier.identifierType: ICCID simStatus: ACTIVATED
identifier.identifiers: 8933201125065160455 ~identifier.identifierType: ICCID
~simStatus: ACTIVATED ~identifier.identifiers: 8933201125065160455
} }
auth:bearer { auth:bearer {

View File

@@ -37,7 +37,7 @@ body:form-urlencoded {
} }
vars:pre-request { vars:pre-request {
params.id: 14557 params.id: 15102
} }
settings { settings {

1843
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -19,12 +19,12 @@
"migrate": "yarn db-migrate -e .env -m deployment/database/migrations -t 99.0.0" "migrate": "yarn db-migrate -e .env -m deployment/database/migrations -t 99.0.0"
}, },
"dependencies": { "dependencies": {
"@sf-alvar/db-migrate": "1.0.6",
"@tsconfig/node22": "^22.0.5", "@tsconfig/node22": "^22.0.5",
"amqp-connection-manager": "^5.0.0", "amqp-connection-manager": "^5.0.0",
"amqplib": "^0.10.9", "amqplib": "^0.10.9",
"axios": "^1.13.3", "axios": "^1.13.3",
"cors": "^2.8.5", "cors": "^2.8.5",
"db-migrate": "https://git.savefamilygps.net/alvarsanmartin/herramienta-migracion.git",
"dotenv": "^17.2.3", "dotenv": "^17.2.3",
"express": "^5.2.1", "express": "^5.2.1",
"pg": "^8.18.0", "pg": "^8.18.0",

View File

@@ -3,20 +3,3 @@ RABBITMQ_USER=guest
RABBITMQ_PASSWORD=guest RABBITMQ_PASSWORD=guest
ENVIORMENT=development ENVIORMENT=development
RABBITMQ_HOST=rabbitmq-sim-broker
#RABBITMQ_HOST=localhost
RABBITMQ_PORT=5672
RABBITMQ_USER=guest
RABBITMQ_PASSWORD=guest
RABBITMQ_SECURE=false
RABBITMQ_VHOST=sim-vhost
# Hay cosas que unificar de varios servicios
POSTGRES_DB=postgres
POSTGRES_DATABASE=postres
POSTGRES_HOST=postgresql-sim-1
POSTGRES_PORT=5432
DEV_POSTGRES_PORT=5432
POSTGRES_USER=postgres
POSTGRES_PASSWORD=1234

View File

@@ -0,0 +1,118 @@
import { describe, it, beforeEach, mock, after } from "node:test";
import assert from "node:assert";
import { SimController } from "./Sim.controller.js";
import { EventBus } from "sim-shared/domain/EventBus.port.js";
import { SimUseCases } from "./Sim.usecases.js";
import { ConsumeMessage } from "amqplib";
import { postgrClient, pgPool } from "#config/postgreConfig.js";
import { httpInstance } from "#config/httpClient.config.js";
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js";
import { PauseCancelTaskRepository } from "#adapters/PauseCancelTaskRepository.js";
import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js";
import { ActionData } from "#domain/DTOs/objeniousapi.js";
describe("SimController Integration Tests (Real UseCases)", () => {
let eventBusMock: any;
let controller: SimController;
let useCases: SimUseCases;
beforeEach(() => {
// Mock ONLY the event bus as requested
eventBusMock = {
publish: mock.fn(),
addSubscribers: mock.fn(),
consume: mock.fn(),
ack: mock.fn(async () => { }),
nack: mock.fn(async () => { }),
};
const operationRepository = new ObjeniousOperationsRepository(
httpInstance,
postgrClient,
);
const orderRepository = new OrderRepository(postgrClient);
const pauseRepository = new PauseCancelTaskRepository(postgrClient);
useCases = new SimUseCases({
httpClient: httpInstance,
operationRepository: operationRepository,
orderRepository: orderRepository,
pauseRepository: pauseRepository
});
// @ts-expect-error
useCases.findActivationDate = async (data: ActionData) => new Date()
controller = new SimController(eventBusMock as unknown as EventBus, useCases);
});
const createMockMsg = (payload: any): ConsumeMessage => {
return {
content: Buffer.from(JSON.stringify(payload)),
fields: {},
properties: {
headers: {
message_id: "test-correlation-id"
}
},
} as unknown as ConsumeMessage;
};
after(async () => {
await pgPool.end();
});
describe("suspend", () => {
it("should call stage_suspend and interact with DB and EventBus", async () => {
const iccid = "test-iccid-suspend-" + Date.now();
const msg = createMockMsg({
key: "sim.test.pause",
payload: {
iccid: iccid
},
headers: {
message_id: "correlation-suspend-" + iccid
}
});
const handler = controller.suspend();
await handler(msg);
// Verify that it reached the stage_suspend logic (which adds to pauseRepository)
// We can query the DB or check if ACK was called
assert.strictEqual(eventBusMock.ack.mock.callCount(), 1, "Message should be ACKed on success");
assert.strictEqual(eventBusMock.nack.mock.callCount(), 0, "Message should not be NACKed");
});
});
describe("terminate", () => {
it("should call stage_terminate and interact with DB and EventBus", async () => {
const iccid = "test-iccid-terminate-" + Date.now();
const msg = createMockMsg({
key: "sim.test.pause",
payload: {
iccid: iccid
},
headers: {
message_id: "correlation-terminate-" + iccid
}
});
const handler = controller.terminate();
await handler(msg);
assert.strictEqual(eventBusMock.ack.mock.callCount(), 1, "Message should be ACKed on success");
assert.strictEqual(eventBusMock.nack.mock.callCount(), 0, "Message should not be NACKed");
});
});
describe("Error Handling", () => {
it("should nack if message is invalid", async () => {
const msg = {
content: Buffer.from("invalid json"),
fields: {},
properties: {},
} as unknown as ConsumeMessage;
const handler = controller.suspend();
await assert.rejects(handler(msg), "Error de suspension consumiendo el mensaje no es valido");
});
});
});

View File

@@ -3,7 +3,7 @@ import { ConsumeMessage } from "amqplib";
import { SimUseCases } from "./Sim.usecases.js"; import { SimUseCases } from "./Sim.usecases.js";
import { SimEvents } from "sim-shared/domain/SimEvents.js"; import { SimEvents } from "sim-shared/domain/SimEvents.js";
import { Result } from "sim-shared/domain/Result.js"; import { Result } from "sim-shared/domain/Result.js";
import { env } from "#config/env/index.js"; import { ActionData } from "#domain/DTOs/objeniousapi.js";
/** /**
* La clase usa generadores de funciones para mantener el contexto * La clase usa generadores de funciones para mantener el contexto
@@ -37,6 +37,7 @@ export class SimController {
} catch (error) { } catch (error) {
console.error('Error al decodificar JSON:', error); console.error('Error al decodificar JSON:', error);
console.error(Buffer.from(msg.content).toString(("utf8")))
// Aquí podrías decidir devolver el string crudo o null // Aquí podrías decidir devolver el string crudo o null
return undefined; return undefined;
} }
@@ -86,7 +87,7 @@ export class SimController {
const resp = await this.tryUseCase(msg, this.useCases.activate({ const resp = await this.tryUseCase(msg, this.useCases.activate({
correlation_id: msgData.headers?.message_id, correlation_id: msgData.headers?.message_id,
dueDate: this.genDueDate(DUE_DATE_SECONDS).toISOString(), dueDate: this.genDueDate(DUE_DATE_SECONDS).toISOString(),
customerAccountCode: env.OBJ_CUSTOMER_CODE, customerAccountCode: "9.49411.10",
identifier: { identifier: {
identifierType: "ICCID", identifierType: "ICCID",
identifiers: [iccid] identifiers: [iccid]
@@ -157,6 +158,9 @@ export class SimController {
} }
} }
/**
* Lo mismo que pause
*/
public suspend() { public suspend() {
return async (msg: ConsumeMessage) => { return async (msg: ConsumeMessage) => {
let msgData; let msgData;
@@ -171,14 +175,18 @@ export class SimController {
} }
const iccid = msgData.payload.iccid const iccid = msgData.payload.iccid
const res = await this.tryUseCase(msg, this.useCases.suspend({ const suspendData: ActionData = {
correlation_id: msgData.headers?.message_id, correlation_id: msgData.headers?.message_id,
dueDate: this.genDueDate(2 * 60).toISOString(), dueDate: this.genDueDate(2 * 60).toISOString(),
identifier: { identifier: {
identifierType: "ICCID", identifierType: "ICCID",
identifiers: [iccid] identifiers: [iccid] // Por algún motivo solo he puesto un iccd por identifier
} }
})) }
const useCaseRes = await this.tryUseCase(msg, this.useCases.stage_suspend(suspendData))
/*
const res = await this.tryUseCase(msg, this.useCases.suspend(actionData))
*/
} }
} }
@@ -195,16 +203,20 @@ export class SimController {
if (msgData == undefined) { if (msgData == undefined) {
return Promise.reject("Mensaje invalido") return Promise.reject("Mensaje invalido")
} }
const iccid = msgData.payload.iccid const iccid = msgData.payload.iccid
console.log("Mensaje procesado", msgData) const terminateActionData: ActionData = {
const res = await this.tryUseCase(msg, this.useCases.terminate({
correlation_id: msgData.headers?.message_id, correlation_id: msgData.headers?.message_id,
dueDate: this.genDueDate(2 * 60).toISOString(), dueDate: this.genDueDate(2 * 60).toISOString(),
identifier: { identifier: {
identifierType: "ICCID", identifierType: "ICCID",
identifiers: [iccid] identifiers: [iccid]
} }
})) }
//const res = await this.tryUseCase(msg, this.useCases.terminate(terminateActionData))
const res = await this.tryUseCase(msg, this.useCases.stage_terminate(terminateActionData))
} }
} }

View File

@@ -5,6 +5,8 @@ import { Result } from "sim-shared/domain/Result.js"
import { ObjeniousOperation, IOperationsRepository as OperationsRepositoryPort } from "sim-shared/domain/operationsRepository.port.js" import { ObjeniousOperation, IOperationsRepository as OperationsRepositoryPort } from "sim-shared/domain/operationsRepository.port.js"
import assert from "node:assert" import assert from "node:assert"
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js" import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"
import { CreatePauseCancelTaskDTO, PauseCancelTaskRepository } from "#adapters/PauseCancelTaskRepository.js"
import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js"
// TODO: // TODO:
// - Pasar a un archivo de DTOs // - Pasar a un archivo de DTOs
@@ -12,21 +14,24 @@ import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"
export class SimUseCases { export class SimUseCases {
private readonly httpClient: HttpClient private readonly httpClient: HttpClient
private readonly operationRepository: OperationsRepositoryPort private readonly objeniousRepository: ObjeniousOperationsRepository
private readonly orderRepository: OrderRepository private readonly orderRepository: OrderRepository
private readonly pauseRepository: PauseCancelTaskRepository
constructor(args: { constructor(args: {
httpClient: HttpClient, httpClient: HttpClient,
operationRepository: OperationsRepositoryPort, operationRepository: ObjeniousOperationsRepository,
orderRepository: OrderRepository orderRepository: OrderRepository,
pauseRepository: PauseCancelTaskRepository
}) { }) {
this.httpClient = args.httpClient this.httpClient = args.httpClient
this.operationRepository = args.operationRepository this.objeniousRepository = args.operationRepository
this.orderRepository = args.orderRepository this.orderRepository = args.orderRepository
this.pauseRepository = args.pauseRepository
} }
private async logOperation(data: ObjeniousOperation) { private async logOperation(data: ObjeniousOperation) {
await this.operationRepository.createOperation({ await this.objeniousRepository.createOperation({
...data ...data
}) })
} }
@@ -73,6 +78,8 @@ export class SimUseCases {
request_id: response.data.requestId request_id: response.data.requestId
} }
// TODO: Esto tiene poco sentido si la operacion ya se
// tenia que haber creado en el generador
this.logOperation(operation) this.logOperation(operation)
.then().catch(e => console.error(e)) .then().catch(e => console.error(e))
@@ -89,7 +96,6 @@ export class SimUseCases {
error: undefined, error: undefined,
data: true data: true
} }
} else { } else {
return { return {
error: String(response.status), error: String(response.status),
@@ -110,7 +116,10 @@ export class SimUseCases {
const OPERATION_URL = "/actions/activateLine" const OPERATION_URL = "/actions/activateLine"
return async () => { return async () => {
const req = this.httpClient.client.post(OPERATION_URL, { const req = this.httpClient.client.post(OPERATION_URL, {
...activationData dueDate: activationData.dueDate,
identifier: activationData.identifier,
customerAccountCode: activationData.customerAccountCode,
offer: activationData.offer
}) })
try { try {
@@ -225,18 +234,151 @@ export class SimUseCases {
const OPERATION_URL = "/actions/suspendLine" const OPERATION_URL = "/actions/suspendLine"
return this.generateUseCase({ return this.generateUseCase({
correlation_id: suspendData.correlation_id, correlation_id: suspendData.correlation_id,
operationPayload: suspendData, operationPayload: {
dueDate: suspendData.dueDate,
identifier: suspendData.identifier
},
url: OPERATION_URL, url: OPERATION_URL,
iccid: suspendData.identifier.identifiers[0], // iccid: suspendData.identifier.identifiers[0], //
operation: "suspend" operation: "suspend"
}) })
} }
/**
* Metodo muy especifico para obtener la fecha e activacion o en su defecto
* la actual para aber cuando se va a completar el periodo de test de una linea
*/
private async findActivationDate(actionData: ActionData) {
const iccid = actionData.identifier.identifiers
const lineData = await this.objeniousRepository.getLinesAPI("ICCID", iccid)
let activationDate = new Date()
// Si no se pueden sacar datos de la linea guardo momentaneamente el error
// pero no se cancela la operacion, el error puede ser de objenious y no nos
// puede afectar
console.log("LineData", lineData.data)
if (lineData.error != undefined) {
console.error(lineData.error)
} else {
const activationDateStr = lineData.data[0].status.activationDate
if (activationDateStr != undefined && activationDateStr != "") {
activationDate = new Date(activationDateStr)
}
}
return activationDate
}
/**
* Paso previo a la suspension para evitar errores cuando el billing es test
*/
public stage_suspend(suspendData: ActionData): () => Promise<Result<string, boolean>> {
return async (): Promise<Result<string, boolean>> => {
const correlation_id = suspendData.correlation_id
const iccid = suspendData.identifier.identifiers
const fail = (error: string) => {
console.error("[Sim.usecases]", error)
if (correlation_id != undefined) {
this.orderRepository.updateOrder({
correlation_id: correlation_id,
new_status: "failed"
})
}
}
let activationDate;
try {
activationDate = await this.findActivationDate(suspendData)
} catch (e) {
return {
error: String(e)
}
}
const newTask: CreatePauseCancelTaskDTO = {
iccid: iccid[0],
activation_date: activationDate,
next_check: undefined, // Que se haga instantaneamente al ser la primera
operation_type: "suspend",
action_data: suspendData
}
const taskCreated = await this.pauseRepository.addTask(newTask)
// Caso que la task no se pueda crear en la BDD
if (taskCreated.error != undefined) {
fail(taskCreated.error)
return {
error: taskCreated.error
}
}
// Caso que se haya creado en la BDD
if (correlation_id != undefined) {
this.orderRepository.updateOrder({
correlation_id: correlation_id,
new_status: "running"
})
}
return {
data: true
}
}
}
/**
* Paso previo a la suspension para evitar errores cuando el billing es test
*/
public stage_terminate(terminateData: ActionData): () => Promise<Result<string, boolean>> {
return async (): Promise<Result<string, boolean>> => {
const correlation_id = terminateData.correlation_id
const activationDate = await this.findActivationDate(terminateData)
const newTask: CreatePauseCancelTaskDTO = {
iccid: terminateData.identifier.identifiers[0],
activation_date: activationDate,
next_check: undefined, // Que se haga instantaneamente al ser la primera
operation_type: "terminate",
action_data: terminateData
}
const taskCreated = await this.pauseRepository.addTask(newTask)
// Caso que la task no se pueda crear en la BDD
if (taskCreated.error != undefined) {
console.error("[Sim.usecases]", taskCreated.error)
if (correlation_id != undefined) {
this.orderRepository.updateOrder({
correlation_id: correlation_id,
new_status: "failed"
})
}
return {
error: taskCreated.error
}
}
// Caso que se haya creado en la BDD
if (correlation_id != undefined) {
this.orderRepository.updateOrder({
correlation_id: correlation_id,
new_status: "running"
})
}
return {
data: true
}
}
}
public terminate(terminationData: ActionData): () => Promise<Result<string, boolean>> { public terminate(terminationData: ActionData): () => Promise<Result<string, boolean>> {
const OPERATION_URL = "/actions/terminateLine" const OPERATION_URL = "/actions/terminateLine"
return this.generateUseCase({ return this.generateUseCase({
correlation_id: terminationData.correlation_id, correlation_id: terminationData.correlation_id,
operationPayload: terminationData, operationPayload: {
dueDate: terminationData.dueDate,
identifier: terminationData.identifier
},
url: OPERATION_URL, url: OPERATION_URL,
iccid: terminationData.identifier.identifiers[0], // iccid: terminationData.identifier.identifiers[0], //
operation: "terminate" operation: "terminate"

View File

@@ -1,6 +1,6 @@
import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js" import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js"
import { JWTService } from "../aplication/JWT.service.js"
import { env } from "./env/index.js" import { env } from "./env/index.js"
import { jwtService } from "./jwtService.config.js"
const OBJ_BASE_URL = env.OBJ_BASE_URL const OBJ_BASE_URL = env.OBJ_BASE_URL
@@ -9,5 +9,5 @@ export const httpInstance = new HttpClient({
headers: { headers: {
"content-type": " application/json; charset=utf-8" "content-type": " application/json; charset=utf-8"
}, },
jwtManager: new JWTService() jwtManager: jwtService
}) })

View File

@@ -0,0 +1,59 @@
import { GrantAccessRequestBody, JWTService } from "sim-shared/aplication/JWT.service.js"
import { env } from "./env/index.js"
import { JWTHeader } from "sim-shared/domain/JWT.js"
const PRIVATE_KEY_PATH = env.OBJ_PEM_PATH
const GET_TOKEN_URL = "https://idp.docapost.io/auth/realms/GETWAY/protocol/openid-connect/token"
const REFRESH_TOKEN_URL = GET_TOKEN_URL
const DEFAULT_BODY: GrantAccessRequestBody = {
grant_type: "client_credentials",
client_id: env.OBJ_CLIENT_ID,
client_assertion_type: "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
client_assertion: env.OBJ_CLI_ASSERTION
}
const DEFAULT_HEADERS = {
"content-type": "application/x-www-form-urlencoded"
}
const DEFAULT_HEADERS_JWT = {
alg: "RS256",
typ: "JWT",
kid: env.OBJ_KID,
}
const DEFAULT_DATA_JWT = {
sub: env.OBJ_CLIENT_ID,
iss: env.OBJ_CLIENT_ID,
aud: "https://idp.docapost.io/auth/realms/GETWAY",
jti: Date.now().toString(),
}
function addIATHeaders(authHeaders: Object) {
const headers = <JWTHeader>{
...authHeaders,
sub: env.OBJ_CLIENT_ID,
iss: env.OBJ_CLIENT_ID,
aud: GET_TOKEN_URL,
jti: Date.now().toString(),
iat: Math.floor(Date.now() / 1000),
exp: Math.floor(Date.now() / 1000) + 5 * 60,
}
return headers
}
export const jwtService = new JWTService({
transformJWTHeaders: addIATHeaders,
defaultHeaders: DEFAULT_HEADERS,
defaultBody: DEFAULT_BODY,
defaultJWTHeaders: DEFAULT_HEADERS_JWT,
defaultJWTPayload: DEFAULT_DATA_JWT,
privateKeyPath: PRIVATE_KEY_PATH,
tokenUrl: GET_TOKEN_URL,
refreshTokenUrl: REFRESH_TOKEN_URL
})

View File

@@ -8,6 +8,7 @@ import { SimUseCases } from "./aplication/Sim.usecases.js"
import { SimController } from "./aplication/Sim.controller.js" import { SimController } from "./aplication/Sim.controller.js"
import { SimRouter } from "./aplication/Sim.router.js" import { SimRouter } from "./aplication/Sim.router.js"
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js" import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"
import { PauseCancelTaskRepository } from "#adapters/PauseCancelTaskRepository.js"
async function startWorker() { async function startWorker() {
const rmqClient = await startRMQClient() const rmqClient = await startRMQClient()
@@ -18,15 +19,21 @@ async function startWorker() {
await pgClient.checkDatabaseConnection() await pgClient.checkDatabaseConnection()
const operationRepository = new ObjeniousOperationsRepository(pgClient) const operationRepository = new ObjeniousOperationsRepository(
httpClient,
pgClient,
)
const orderRepository = new OrderRepository(pgClient) const orderRepository = new OrderRepository(pgClient)
const pauseRepository = new PauseCancelTaskRepository(pgClient)
const simActivationController = new SimController( const simActivationController = new SimController(
rmqClient, rmqClient,
new SimUseCases({ new SimUseCases({
httpClient: httpClient, httpClient: httpClient,
operationRepository: operationRepository, operationRepository: operationRepository,
orderRepository: orderRepository orderRepository: orderRepository,
pauseRepository: pauseRepository
}) })
) )
const simRouter = new SimRouter(simActivationController, rmqClient) const simRouter = new SimRouter(simActivationController, rmqClient)

View File

@@ -0,0 +1,72 @@
import { after, before, describe, it } from "node:test";
import { CreatePauseCancelTaskDTO, PauseCancelTaskRepository } from "./PauseCancelTaskRepository.js";
import { postgrClient } from "#config/postgreConfig.js";
import assert from "node:assert";
const testTask: CreatePauseCancelTaskDTO = {
iccid: "1234",
operation_type: "suspend",
activation_date: new Date(),
next_check: new Date(),
action_data: {
dueDate: new Date().toString(),
correlation_id: "12223",
identifier: {
identifiers: ["1234"],
identifierType: "ICCID"
}
}
}
describe("Test PauseCancelTaskRepository - DB", () => {
const createdIds: number[] = [];
const pauseRepo = new PauseCancelTaskRepository(postgrClient)
before(() => {
})
after(() => {
})
it("Should create a task", async () => {
const created = await pauseRepo.addTask(testTask)
assert.ok(created != undefined, "A value must be returned always")
assert.ok(created.error == undefined, "Should not return a error")
assert.ok(created.data != undefined, "Data must be returned")
createdIds.push(created.data.id)
})
it("Should update a existing task", async () => {
const updated = await pauseRepo.updateTask({
id: createdIds[0],
next_check: new Date()
})
assert.ok(updated != undefined, "A value must be returned always")
assert.ok(updated.error == undefined, "Should not return a error")
assert.ok(updated.data != undefined, "Data must be returned")
})
it("Should finish a existing task", async () => {
const finish = await pauseRepo.finishTask({
id: createdIds[0],
error: "ok"
})
assert.ok(finish != undefined, "A value must be returned always")
assert.ok(finish.error == undefined, "Should not return a error")
assert.ok(finish.data != undefined, "Data must be returned")
})
it("Should get at least 1 pending task", async () => {
const created = await pauseRepo.addTask(testTask)
const pending = await pauseRepo.getPending()
assert.ok(pending != undefined, "A value must be returned always")
assert.ok(pending.error == undefined, "Should not return a error")
assert.ok(pending.data != undefined, "Data must be returned")
console.log("--> ", pending.data[0])
})
})

View File

@@ -0,0 +1,126 @@
import { Result } from "sim-shared/domain/Result.js";
import { QueryResult } from "pg";
import { PgClient } from "sim-shared/infrastructure/PgClient.js";
import { AxiosError } from "axios";
import { ActionData } from "#domain/DTOs/objeniousapi.js";
export type PauseCancelTask = {
id: number;
iccid: string;
operation_type: "suspend" | "terminate",
last_checked?: Date | null;
activation_date?: Date | null;
next_check?: Date | null;
completed_date?: Date | null;
error?: string | null;
action_data: ActionData
}
export type CreatePauseCancelTaskDTO = Pick<PauseCancelTask, "iccid" | "activation_date" | "next_check" | "operation_type" | "action_data">
export type UpdatePauseCancelTaskDTO = Pick<PauseCancelTask, "id" | "next_check">
export type FinishPauseCancelTaskDTO = Pick<PauseCancelTask, "id" | "error">
/**
* Repositorio para compensar los problemas de cacelcaiones/pausas de objenious a
* la hora aplicarlo sobre una linea con el billing a test.
*/
export class PauseCancelTaskRepository {
constructor(
private readonly pgClient: PgClient
) {
}
/**
* Obtiene las siguientes que se pueden lanzar, puede haber más pero
* estan pendientes
*/
public async getPending(): Promise<Result<string, PauseCancelTask[]>> {
const sql = `
SELECT * FROM pause_cancel_tasks
WHERE completed_date IS NULL
AND (next_check <= NOW() OR next_check IS NULL)
ORDER BY id ASC;
`;
try {
const res: QueryResult<PauseCancelTask> = await this.pgClient.query(sql);
return {
data: res.rows
}
} catch (e) {
return {
error: (e as AxiosError).message
}
}
}
public async addTask(task: CreatePauseCancelTaskDTO): Promise<Result<string, PauseCancelTask>> {
const sql = `
INSERT INTO pause_cancel_tasks (iccid, activation_date, next_check, last_checked, operation_type, action_data)
VALUES ($1, $2, $3, now(), $4, $5)
RETURNING *;
`;
try {
const values = [task.iccid, task.activation_date, task.next_check, task.operation_type, JSON.stringify(task.action_data)];
const res: QueryResult<PauseCancelTask> = await this.pgClient.query(sql, values);
return {
data: res.rows[0]
}
} catch (e) {
return {
error: (e as AxiosError).message
}
}
}
/**
* Se ha vuelto a comprobar la tarea pero sigue en test
*/
public async updateTask(updateData: UpdatePauseCancelTaskDTO): Promise<Result<string, PauseCancelTask>> {
const sql = `
UPDATE pause_cancel_tasks
SET last_checked = now(), next_check = $1
WHERE id = $2
RETURNING *;
`;
try {
const res = await this.pgClient.query<PauseCancelTask>(sql, [updateData.next_check, updateData.id]);
return {
data: res.rows[0]
}
} catch (e) {
return {
error: (e as AxiosError).message
}
}
}
/**
* La tarea ha termiando bien o mal
*/
public async finishTask(finishData: FinishPauseCancelTaskDTO) {
const sql = `
UPDATE pause_cancel_tasks
SET completed_date = NOW(), error = $1
WHERE id = $2
RETURNING *;
`;
try {
const res = await this.pgClient.query(sql, [finishData.error, finishData.id]);
return {
data: res.rows[0]
}
} catch (e) {
return {
error: (e as AxiosError).message
}
}
}
}

View File

@@ -53,7 +53,7 @@
} }
}, },
"scripts": { "scripts": {
"test": "echo \"Error: no test specified\" && exit 1", "test": "node --import tsx --test ./**/*.test.ts",
"dev": "tsx watch index.ts", "dev": "tsx watch index.ts",
"build": "tsc --build && yarn tsc-alias -p tsconfig.json && cp .env package.json ../../dist/packages/sim-consumidor-objenious/", "build": "tsc --build && yarn tsc-alias -p tsconfig.json && cp .env package.json ../../dist/packages/sim-consumidor-objenious/",
"start": "node ../../dist/packages/sim-consumidor-objenious/index.js", "start": "node ../../dist/packages/sim-consumidor-objenious/index.js",

View File

@@ -3,6 +3,7 @@ import { SimUsecases } from "./Sim.usecases.js"
import { activationValidator, iccidValidator } from "./httpValidators.js" import { activationValidator, iccidValidator } from "./httpValidators.js"
import { companyFromIccid } from "#domain/companies.js" import { companyFromIccid } from "#domain/companies.js"
import { BodyValidator } from "sim-shared/aplication/BodyValidator.js" import { BodyValidator } from "sim-shared/aplication/BodyValidator.js"
import { tryCatch } from "packages/sim-shared/domain/Result.js"
export class SimController { export class SimController {
@@ -35,21 +36,22 @@ export class SimController {
}) { }) {
return async (req: Request, res: Response) => { return async (req: Request, res: Response) => {
const body = req.body const body = req.body
// 1. Validacion del body // 1. Validacion del body
try { if (args.validator != undefined) {
if (args.validator != undefined) const validationResult = args.validator.validate(body)
args.validator.validate(body) if (validationResult.error != undefined) {
} catch (e) { res.status(422).json({
if (args.onError != undefined) args.onError(body, e as string) errors: {
res.status(422).json({ ...validationResult.error
errors: { }
msg: e })
} args.onError(body, validationResult.error.msg)
}) return 1;
}
} }
// 2. Transformacion del body // 2. Transformacion del body
// TODO: sustituir el try cach
let data: P = body; let data: P = body;
try { try {
if (args.mapBody != undefined) if (args.mapBody != undefined)
@@ -60,26 +62,33 @@ export class SimController {
msg: "Error parseando el body: " + e msg: "Error parseando el body: " + e
} }
}) })
args.onError(body, String(e))
return 1;
} }
// 3. Aplicacion del UseCase // 3. Aplicacion del UseCase
try { // TODO: todos los use cases tienen que pasar a devolver un Result<>
const usecaseResult = await args.useCase(data) const usecaseResult = await args.useCase(data) // no deberia hacer falta el trycatch
// 4. Se devuelve al usuario el caso de exito
res.status(200).json( // 4. Casos de error del usecase
usecaseResult if (usecaseResult.error != undefined) {
).send()
args.onSuccess(data)
} catch (err) {
// 4.1 Error del caso de uso // 4.1 Error del caso de uso
res.status(500).json({ res.status(500).json({
errors: { errors: {
msg: "Error general:" + err ...usecaseResult.error
} }
}).send() }).send()
return; args.onError(body, usecaseResult.error ?? "Error indefinido")
return 1;
} }
// 5. Se devuelve al usuario el caso de exito
res.status(200).json(
usecaseResult.data
).send()
args.onSuccess(usecaseResult.data)
return 0;
} }
} }
@@ -92,155 +101,64 @@ export class SimController {
console.log("OK", data) console.log("OK", data)
} }
}) })
} }
public preactivation() { public preactivation() {
return async (req: Request, res: Response) => { return this.controllerGenerator<{ iccid: string, offer: string }, { iccid: string, offer: string, compañia: string }>({
console.warn("[!] Se deberia de usar la peticion /sim/activate directamente") validator: activationValidator,
try { mapBody: (b) => {
iccidValidator.validate(req.body) const { iccid, offer } = b
} catch (e) { const compañia = companyFromIccid(iccid)
res.status(422).json({ return { iccid, compañia, offer }
errors: { },
msg: e useCase: (args) => this.simUseCases.preActivation(args),
} onError: (d, e) => console.error("[x] Error preactivation: ", d, e),
}) onSuccess: console.log
} })
const { iccid } = req.body
const compañia = companyFromIccid(iccid)
try {
await this.simUseCases.preActivation({ iccid, compañia })
res.status(200).json({
iccid: iccid,
operation: "activation"
}).send()
} catch (err) {
console.error("Error activando la sim ", req.body)
res.status(500).json({
errors: {
msg: "Error general de activation"
}
}).send()
return;
}
}
} }
public activation() { public activation() {
return async (req: Request, res: Response) => { return this.controllerGenerator<{ iccid: string, offer: string }, { iccid: string, offer: string, compañia: string }>({
try { validator: activationValidator,
activationValidator.validate(req.body) mapBody: (b) => {
} catch (e) { const { iccid, offer } = b
res.status(422).json({ const compañia = companyFromIccid(iccid)
errors: { return { iccid, compañia, offer }
msg: e },
} useCase: (args) => this.simUseCases.activation(args),
}) onError: (d, e) => console.error("[x] Error activacion: ", d, e),
console.error("[!] Error validando mensaje") onSuccess: console.log
return; })
}
const { iccid, offer } = req.body
const compañia = companyFromIccid(iccid)
if (compañia == undefined) {
res.status(500).json({
errors: {
msg: "El iccid no pertenece a una compañia conocida"
}
})
return;
}
try {
await this.simUseCases.activation({ iccid, compañia, offer })
res.status(200).json({
iccid: iccid,
operation: "activation"
}).send()
return;
} catch (err) {
console.error("Error activando la sim ", req.body)
res.status(500).json({
errors: {
msg: "Error general de activation"
}
}).send()
return;
}
}
} }
public cancelation() { public cancelation() {
return async (req: Request, res: Response) => { return this.controllerGenerator<{ iccid: string }, { iccid: string, compañia: string }>({
try { validator: iccidValidator,
iccidValidator.validate(req.body) mapBody: (b) => {
} catch (e) { const { iccid } = b
res.status(422).json({ const compañia = companyFromIccid(iccid)
errors: { return { iccid, compañia }
msg: e },
} useCase: (args) => this.simUseCases.cancelation(args),
}) // TODO: Meter en los mensajes el nombre de la operacion
} onError: (d, e) => console.error("[x] Error cancelacion: ", d, e),
onSuccess: console.log
const { iccid } = req.body })
const compañia = companyFromIccid(iccid)
try {
await this.simUseCases.cancelation({ iccid, compañia })
res.status(200).json({
iccid: iccid,
operation: "cancelation"
})
} catch (err) {
console.error("Error cancelando la sim ", req.body)
res.status(500).json({
errors: {
msg: "Error general de cancelacion"
}
})
}
}
} }
public pause() { public pause() {
return async (req: Request, res: Response) => { return this.controllerGenerator<{ iccid: string }, { iccid: string, compañia: string }>({
try { validator: iccidValidator,
iccidValidator.validate(req.body) mapBody: (b) => {
} catch (e) { const { iccid } = b
res.status(422).json({ const compañia = companyFromIccid(iccid)
errors: { return { iccid, compañia }
msg: e },
} useCase: (args) => this.simUseCases.pause(args),
}) onError: (d, e) => console.error("[x] Error pausa: ", d, e),
} onSuccess: console.log
})
const { iccid } = req.body
const compañia = companyFromIccid(iccid)
try {
await this.simUseCases.pause({ iccid, compañia })
res.status(200).json({
iccid: iccid,
operation: "cancelation"
})
} catch (err) {
console.error("Error pausando la sim ", req.body)
res.status(500).json({
errors: {
msg: "Error pausando la sim"
}
})
}
}
} }
public free() { public free() {

View File

@@ -24,7 +24,7 @@ export class SimUsecases {
} }
/** /**
* Añade un id de mensaje (correlation_id en ala base de datos) * Añade un id de mensaje (correlation_id en la base de datos) a los mensajes que van a entrar en la cola
*/ */
private addMessage_id(event: SimEvents.general): SimEvents.general & { headers: { message_id: string } } { private addMessage_id(event: SimEvents.general): SimEvents.general & { headers: { message_id: string } } {
const uuid = uuidv7() const uuid = uuidv7()
@@ -65,7 +65,6 @@ export class SimUsecases {
const result = await this.orderRepository.createOrder<T>(order) const result = await this.orderRepository.createOrder<T>(order)
return result; return result;
} }
async test(args: { iccid: string }) { async test(args: { iccid: string }) {
@@ -84,7 +83,7 @@ export class SimUsecases {
} }
/** /**
* WIP * TODO:
* Crea una nueva sim de la que no se tenia registro anteriormente * Crea una nueva sim de la que no se tenia registro anteriormente
* Si ya existia se modifican los campos pero no se hace un cambio * Si ya existia se modifican los campos pero no se hace un cambio
* de estado. * de estado.
@@ -101,8 +100,8 @@ export class SimUsecases {
return this.eventBus.publish([activationEvent]) return this.eventBus.publish([activationEvent])
} }
async activation(args: { iccid: string, compañia: string, offer: string }) { async activation(args: { iccid: string, compañia: string, offer: string }):
Promise<Result<string, { iccid: string, message_id: string, operation: "activation" }>> {
const activationEvent = <SimEvents.activation>{ const activationEvent = <SimEvents.activation>{
key: `sim.${args.compañia}.activate`, key: `sim.${args.compañia}.activate`,
payload: { payload: {
@@ -110,14 +109,29 @@ export class SimUsecases {
offer: args.offer offer: args.offer
} }
} }
const activationWithId = this.addMessage_id(activationEvent) const activationWithId = this.addMessage_id(activationEvent)
console.log("[d] Activation ", activationWithId) console.log("[d] Activation ", activationWithId)
await this.eventBus.publish([activationWithId]) await this.eventBus.publish([activationWithId])
await this.saveOrder(activationWithId) const createdOrder = await this.saveOrder<SimEvents.activation>(activationWithId)
if (createdOrder.error != undefined) {
console.error(createdOrder.error)
return {
error: createdOrder.error
}
}
return {
data: {
iccid: args.iccid,
operation: "activation",
message_id: createdOrder.data?.correlation_id
}
}
} }
async preActivation(args: { iccid: string, compañia: string }) { async preActivation(args: { iccid: string, compañia: string }):
Promise<Result<string, { iccid: string, message_id: string, operation: "preactivation" }>> {
const preActivationEvent = <SimEvents.preActivation>{ const preActivationEvent = <SimEvents.preActivation>{
key: `sim.${args.compañia}.preActivate`, key: `sim.${args.compañia}.preActivate`,
@@ -126,13 +140,30 @@ export class SimUsecases {
} }
} }
console.log("[d] Pre - activation ", preActivationEvent) console.log("[d] Pre - activation ", preActivationEvent)
return this.eventBus.publish([preActivationEvent]) await this.eventBus.publish([preActivationEvent])
const preactivationWithId = this.addMessage_id(preActivationEvent)
const createdOrder = await this.saveOrder<SimEvents.preActivation>(preactivationWithId)
if (createdOrder.error != undefined) {
console.error(createdOrder.error)
return {
error: createdOrder.error
}
}
return {
data: {
iccid: args.iccid,
operation: "preactivation",
message_id: createdOrder.data?.correlation_id
}
}
} }
/** /**
* Para objenious es terminate * Para objenious es terminate
*/ */
async cancelation(args: { iccid: string, compañia: string }) { async cancelation(args: { iccid: string, compañia: string }):
Promise<Result<string, { iccid: string, message_id: string, operation: "cancelation" }>> {
const cancelationEvent = <SimEvents.cancel>{ const cancelationEvent = <SimEvents.cancel>{
key: `sim.${args.compañia}.cancel`, key: `sim.${args.compañia}.cancel`,
@@ -144,8 +175,21 @@ export class SimUsecases {
const cancelationWithId = this.addMessage_id(cancelationEvent) const cancelationWithId = this.addMessage_id(cancelationEvent)
console.log("[d] Cancelation ", cancelationWithId) console.log("[d] Cancelation ", cancelationWithId)
await this.eventBus.publish([cancelationWithId]) await this.eventBus.publish([cancelationWithId])
await this.saveOrder(cancelationWithId) const savedOrder = await this.saveOrder(cancelationWithId)
return cancelationWithId if (savedOrder.error != undefined) {
console.error(savedOrder.error)
return {
error: savedOrder.error
}
}
return {
data: {
iccid: args.iccid,
message_id: savedOrder.data.correlation_id,
operation: "cancelation"
}
}
} }
// alias por si acaso // alias por si acaso
public terminate = this.cancelation; public terminate = this.cancelation;
@@ -153,7 +197,8 @@ export class SimUsecases {
/** /**
* alias de bloquear / suspender en objenious * alias de bloquear / suspender en objenious
*/ */
async pause(args: { iccid: string, compañia: string }) { async pause(args: { iccid: string, compañia: string }):
Promise<Result<string, { iccid: string, message_id: string, operation: "pause" }>> {
const pauseEvent = <SimEvents.pause>{ const pauseEvent = <SimEvents.pause>{
key: `sim.${args.compañia}.pause`, key: `sim.${args.compañia}.pause`,
payload: { payload: {
@@ -161,10 +206,25 @@ export class SimUsecases {
} }
} }
const pauseWithId = this.addMessage_id(pauseEvent) const pauseWithId = this.addMessage_id(pauseEvent)
console.log("[d] Cancelation ", pauseWithId) console.log("[d] Pause", pauseWithId)
await this.eventBus.publish([pauseWithId]) await this.eventBus.publish([pauseWithId])
await this.saveOrder(pauseWithId) await this.saveOrder(pauseWithId)
return pauseWithId const savedOrder = await this.saveOrder(pauseWithId)
if (savedOrder.error != undefined) {
console.error(savedOrder.error)
return {
error: savedOrder.error
}
}
return {
data: {
iccid: args.iccid,
message_id: savedOrder.data.correlation_id,
operation: "pause"
}
}
} }
async free(args: { iccid: string, compañia: string }) { async free(args: { iccid: string, compañia: string }) {

View File

@@ -8,9 +8,10 @@ describe("test validators", () => {
iccid: "8933201125068886692" iccid: "8933201125068886692"
} }
const res = iccidValidator.validate(validBody) const res = iccidValidator.validate(validBody)
assert(res == true) assert(res.error == undefined)
}), }),
// TODO: Nada de esto es valido, a partir de ahora los validadores no lanzan excepcion sino Result
it("shouldnt validate empty string iccid", () => { it("shouldnt validate empty string iccid", () => {
const validBody = { const validBody = {
iccid: "" iccid: ""

View File

@@ -3,7 +3,9 @@ import { BodyValidator, Validator } from "sim-shared/aplication/BodyValidator.js
const offers = new Map([ const offers = new Map([
["mensual", "SAVEFAMILY1"], ["mensual", "SAVEFAMILY1"],
["anual", "SAVEFAMILY2"] ["anual", "SAVEFAMILY2"],
["SAVEFAMILY1", "SAVEFAMILY1"],
["SAVEFAMILY2", "SAVEFAMILY2"],
]) ])
const iccidLongitudValidator = <Validator<{ iccid: string }>>{ const iccidLongitudValidator = <Validator<{ iccid: string }>>{

View File

@@ -1,5 +1,6 @@
import express from "express" import express from "express"
import cors from 'cors'; import cors from 'cors';
import path from 'path';
import { simRoutes } from "./infrastructure/simRoutes.http.js" import { simRoutes } from "./infrastructure/simRoutes.http.js"
import { rabbitmqEventBus } from '#config/eventBusConfig.js'; import { rabbitmqEventBus } from '#config/eventBusConfig.js';
import { env } from "#config/env/index.js" import { env } from "#config/env/index.js"
@@ -27,6 +28,8 @@ app.use(express.urlencoded({ extended: true }));
app.use("/sim", simRoutes) app.use("/sim", simRoutes)
app.use("/orders", orderRoutes) app.use("/orders", orderRoutes)
app.use("/docs", express.static(path.join(process.cwd(), '../../docs')))
app.get("/health", (req, res) => { app.get("/health", (req, res) => {
res.status(200).json({ status: "ok" }) res.status(200).json({ status: "ok" })
}) })

View File

@@ -7,6 +7,6 @@ OBJ_KID=xNfbMiyL1ORXGP8lElhcv8nVaG3EJKye4Lc1YoN3I1E
OBJ_BASE_URL=https://api-getway.objenious.com/ws OBJ_BASE_URL=https://api-getway.objenious.com/ws
# OBJ_BASE_URL=https://api-getway.objenious.com/ws/test # OBJ_BASE_URL=https://api-getway.objenious.com/ws/test
# NOTIFICATION_URL="https://sf-sim-activation.savefamilygps.net/send-activation-mail" NOTIFICATION_URL="https://sf-sim-activation.savefamilygps.net/send-activation-mail"
NOTIFICATION_URL="localhost" # NOTIFICATION_URL="localhost"
SIM_ACTIVATION_API_KEY=9e48c4ac-1ab0-4397-b3f3-6c239200dfe6 SIM_ACTIVATION_API_KEY=9e48c4ac-1ab0-4397-b3f3-6c239200dfe6

View File

@@ -31,20 +31,21 @@ export const env = {
OBJ_KID: String(process.env.OBJ_KID), OBJ_KID: String(process.env.OBJ_KID),
OBJ_BASE_URL: String(process.env.OBJ_BASE_URL), OBJ_BASE_URL: String(process.env.OBJ_BASE_URL),
NOTIFICATION_URL: String(process.env.NOTIFICATION_URL), NOTIFICATION_URL: String(process.env.NOTIFICATION_URL ?? ""),
SIM_ACTIVATION_API_KEY: String(process.env.SIM_ACTIVATION_API_KEY) SIM_ACTIVATION_API_KEY: String(process.env.SIM_ACTIVATION_API_KEY ?? "")
}; };
// assert las partes criticas // assert las partes criticas
assert(env.RABBITMQ_PASSWORD != undefined) assert(env.RABBITMQ_PASSWORD != undefined)
assert(env.RABBITMQ_USER != undefined) assert(env.RABBITMQ_USER != undefined)
assert(env.SIM_ACTIVATION_API_KEY != undefined) assert(env.SIM_ACTIVATION_API_KEY != "")
assert(env.NOTIFICATION_URL != undefined) assert(env.NOTIFICATION_URL != "")
if (env.ENVIRONMENT == "production") { if (env.ENVIRONMENT == "production") {
assert(env.RABBITMQ_PASSWORD != "guest") assert(env.RABBITMQ_PASSWORD != "guest")
assert(env.RABBITMQ_HOST != "localhost") assert(env.RABBITMQ_HOST != "localhost")
} }
console.log("CRON: ENV", env)
console.log("[i] verificado env")

View File

@@ -1,6 +1,7 @@
import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js" import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js"
import { env } from "./env/index.js" import { env } from "./env/index.js"
import { JWTService } from "packages/sim-consumidor-objenious/aplication/JWT.service.js" import { jwtService } from "./jwtService.config.js"
const OBJ_BASE_URL = env.OBJ_BASE_URL const OBJ_BASE_URL = env.OBJ_BASE_URL
@@ -9,5 +10,5 @@ export const httpInstance = new HttpClient({
headers: { headers: {
"content-type": " application/json; charset=utf-8" "content-type": " application/json; charset=utf-8"
}, },
jwtManager: new JWTService() jwtManager: jwtService
}) })

View File

@@ -0,0 +1,20 @@
/**
* Cliente de postgres para la intranet. Se usa solo porque hace falta para el
* volcado de datos, si se usa en mas partes algo estás haciendo mal.
*/
import { Pool } from 'pg';
import { PgClient } from 'sim-shared/infrastructure/PgClient.js'
import { env } from './env/index.js';
export const pgPoolIntranet = new Pool({
user: env.POSTGRES_USER,
host: env.POSTGRES_HOST,
database: "intranet",
password: env.POSTGRES_PASSWORD,
port: Number(env.POSTGRES_PORT) || 5432,
});
export const postgresClientIntranet = new PgClient({
pool: pgPoolIntranet
})

View File

@@ -0,0 +1,59 @@
import { GrantAccessRequestBody, JWTService } from "sim-shared/aplication/JWT.service.js"
import { env } from "./env/index.js"
import { JWTHeader } from "sim-shared/domain/JWT.js"
const PRIVATE_KEY_PATH = env.OBJ_PEM_PATH
const GET_TOKEN_URL = "https://idp.docapost.io/auth/realms/GETWAY/protocol/openid-connect/token"
const REFRESH_TOKEN_URL = GET_TOKEN_URL
const DEFAULT_BODY: GrantAccessRequestBody = {
grant_type: "client_credentials",
client_id: env.OBJ_CLIENT_ID,
client_assertion_type: "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
client_assertion: env.OBJ_CLI_ASSERTION
}
const DEFAULT_HEADERS = {
"content-type": "application/x-www-form-urlencoded"
}
const DEFAULT_HEADERS_JWT = {
alg: "RS256",
typ: "JWT",
kid: env.OBJ_KID,
}
const DEFAULT_DATA_JWT = {
sub: env.OBJ_CLIENT_ID,
iss: env.OBJ_CLIENT_ID,
aud: "https://idp.docapost.io/auth/realms/GETWAY",
jti: Date.now().toString(),
}
function addIATHeaders(authHeaders: Object) {
const headers = <JWTHeader>{
...authHeaders,
sub: env.OBJ_CLIENT_ID,
iss: env.OBJ_CLIENT_ID,
aud: GET_TOKEN_URL,
jti: Date.now().toString(),
iat: Math.floor(Date.now() / 1000),
exp: Math.floor(Date.now() / 1000) + 5 * 60,
}
return headers
}
export const jwtService = new JWTService({
transformJWTHeaders: addIATHeaders,
defaultHeaders: DEFAULT_HEADERS,
defaultBody: DEFAULT_BODY,
defaultJWTHeaders: DEFAULT_HEADERS_JWT,
defaultJWTPayload: DEFAULT_DATA_JWT,
privateKeyPath: PRIVATE_KEY_PATH,
tokenUrl: GET_TOKEN_URL,
refreshTokenUrl: REFRESH_TOKEN_URL
})

View File

@@ -5,6 +5,12 @@ import { httpInstance } from "./config/httpClient.config.js"
import { CheckObjeniousRequests } from "./tasks/check_objenious_request.js" import { CheckObjeniousRequests } from "./tasks/check_objenious_request.js"
import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js" import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js"
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js" import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"
import { TaskVolcadoLineas } from "./tasks/volcado_lineas.js"
import { ObjeniousLinesRepository } from "./infranstructure/ObjeniousLinesRepository.js"
import { postgresClientIntranet } from "./config/intranetPostgresConfig.js"
import { PauseCancelTaskRepository } from "packages/sim-consumidor-objenious/infrastructure/PauseCancelTaskRepository.js"
import { PauseTerminateTask } from "./tasks/check_pause_terminate.js"
import { SimUseCases } from "packages/sim-consumidor-objenious/aplication/Sim.usecases.js"
async function startCron() { async function startCron() {
const commonSettings = { const commonSettings = {
@@ -14,10 +20,16 @@ async function startCron() {
const httpClient = httpInstance const httpClient = httpInstance
const pgClient = new PgClient({ pool: pgPool }) const pgClient = new PgClient({ pool: pgPool })
console.log("[i] Comprobando conexion con la BDD ")
await pgClient.checkDatabaseConnection() await pgClient.checkDatabaseConnection()
await pgClient.checkDatabaseConnection()
const operationRepository = new ObjeniousOperationsRepository(pgClient) const operationRepository = new ObjeniousOperationsRepository(
httpClient,
pgClient,
)
const orderRepository = new OrderRepository(pgClient) const orderRepository = new OrderRepository(pgClient)
const objeniousLineRepository = new ObjeniousLinesRepository(postgresClientIntranet)
const objTask = new CheckObjeniousRequests( const objTask = new CheckObjeniousRequests(
operationRepository, operationRepository,
@@ -25,23 +37,56 @@ async function startCron() {
httpClient, httpClient,
) )
await objTask.getPendingOperations() const objeniosRepo = new ObjeniousOperationsRepository(
httpClient,
pgClient
)
const volcadoLineasTask = new TaskVolcadoLineas(
objeniousLineRepository,
objeniosRepo
)
const pauseRepo = new PauseCancelTaskRepository(pgClient)
const simUsecases = new SimUseCases({
httpClient: httpClient,
operationRepository: operationRepository,
orderRepository: orderRepository,
pauseRepository: pauseRepo
})
const pauseTask = new PauseTerminateTask(
objeniosRepo,
pauseRepo,
simUsecases,
orderRepository
)
const PERIODO_PETICIONES = 10 * 60 * 1000
const interval = setInterval(async () => { const interval = setInterval(async () => {
console.log("Updating...") try {
await objTask.getPendingOperations() await objTask.getPendingOperations()
console.log("Update finished") } catch (e) {
}, 10 * 60 * 1000) console.error("[x] Error de actualizacion de las lineas ")
/* }
const task = cron.createTask("* * * * *", async () => { }, PERIODO_PETICIONES)
}
, { const PERIODO_VOLCADO = 60 * 60 * 1000
...commonSettings, const volcadoInterval = setInterval(async () => {
name: "Test" try {
}) await volcadoLineasTask.loadLines()
*/ } catch (e) {
console.error("[x] Volcado de lineas de Objenious Fallido", e)
}
}, PERIODO_VOLCADO)
await pauseTask.run()
const PERIODO_CANCELACIONES = 60 * 60 * 1000;
const clacelacionesInterval = setInterval(async () => {
await pauseTask.run()
}, PERIODO_CANCELACIONES)
//await objTask.getPendingOperations()
} }

View File

@@ -0,0 +1,59 @@
import test, { after, before, describe } from "node:test";
import { CreateObjeniousLineDTO } from "sim-shared/domain/objeniousLine.js";
import { ObjeniousLinesRepository } from "./ObjeniousLinesRepository.js";
import { postgrClient } from "../config/postgreConfig.js";
import assert from "node:assert";
describe("Line insertion test", async () => {
    //const pgClient = postgreClientIntranet
    // In prod the Intranet client must be used so the other database is targeted.
    const pgClient = postgrClient
    const lineRepository = new ObjeniousLinesRepository(pgClient)
    // Fixture row; `raw` content is irrelevant for these tests.
    const lineaTest: CreateObjeniousLineDTO = {
        simId: 1234,
        iccid: "9999999999999",
        msisdn: "34654674732",
        imei: "219789481293",
        imeiChangeDate: new Date(),
        offerCode: "SAVEFAMILY1",
        status: "ACTIVATED",
        preactivationDate: new Date(),
        activationDate: new Date(),
        commercialStatus: "test",
        commercialStatusDate: new Date(),
        billingStatus: "test",
        billingStatusChangeDate: new Date(),
        billingActivationDate: new Date(),
        createDate: new Date(),
        raw: { test: "test" } as any // Not needed for this test
    }

    // Clean up before and after tests to ensure isolation
    const cleanup = async () => {
        await pgClient.query("DELETE FROM objenious_lines WHERE simId = 1234");
    };
    before(async () => {
        await cleanup()
    })
    after(async () => {
        await cleanup()
    })

    test("Should insert new line", async () => {
        const res = await lineRepository.insertOrUpdate(lineaTest)
        assert.ok(res != undefined, "The line wasn't created")
    })

    test("Should not update a line if the hash is the same", async () => {
        // Same payload, same hash: the upsert's WHERE clause must skip the row.
        const res = await lineRepository.insertOrUpdate(lineaTest)
        assert.ok(res == undefined, "The line have been updated")
    })

    test("Should update a line if the hash changes", async () => {
        // Mutate the CLONE (the original code mutated lineaTest and left the
        // clone unused) so a field feeding the hash changes and an id is returned.
        const updated = structuredClone(lineaTest)
        updated.billingActivationDate = new Date()
        const res = await lineRepository.insertOrUpdate(updated)
        assert.ok(res != undefined, "The line wasn't updated")
    })
})

View File

@@ -0,0 +1,112 @@
/**
* Repositorio para el volcado de lineas de objenious en intranet
* solo para uso en el volcado.
*/
import { createHash } from "node:crypto";
import { PoolClient } from "pg";
import { CreateObjeniousLineDTO } from "sim-shared/domain/objeniousLine.js";
import { PgClient } from "sim-shared/infrastructure/PgClient.js";
export class ObjeniousLinesRepository {
    constructor(
        private pgClient: PgClient
    ) {
    }

    /**
     * Builds a sha256 fingerprint (base64url) of the whole DTO via
     * JSON.stringify. Used by the upsert to detect "nothing changed".
     * Returns undefined when serialization fails (e.g. circular `raw`),
     * in which case the line is skipped by insertOrUpdate.
     */
    private generateLineHash(data: CreateObjeniousLineDTO) {
        try {
            const lineStr = JSON.stringify(data)
            const hash = createHash("sha256").update(lineStr).digest("base64url")
            return hash
        } catch (e) {
            console.error("[x] Error generando el hash de la linea", data)
            return undefined
        }
    }

    /**
     * Inserts the line, or updates it on simId conflict — but only when the
     * stored hash differs from the new one (WHERE ... IS DISTINCT FROM).
     * Returns `{ id }` when a row was inserted/updated, and undefined when the
     * hash matched (RETURNING yields no row) or the hash could not be built.
     * Throws on query failure; the pooled client is always released.
     */
    public async insertOrUpdate(data: CreateObjeniousLineDTO) {
        const query = `
        INSERT INTO objenious_lines (
            simId,
            iccid,
            msisdn,
            imei,
            imeiChangeDate,
            offerCode,
            status,
            preactivationDate,
            activationDate,
            commercialStatus,
            commercialStatusDate,
            billingStatus,
            billingStatusChangeDate,
            billingActivationDate,
            createDate,
            raw,
            hash
        ) VALUES (
            $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17
        )
        ON CONFLICT (simId)
        DO UPDATE SET
            iccid = EXCLUDED.iccid,
            msisdn = EXCLUDED.msisdn,
            imei = EXCLUDED.imei,
            imeiChangeDate = EXCLUDED.imeiChangeDate,
            offerCode = EXCLUDED.offerCode,
            status = EXCLUDED.status,
            preactivationDate = EXCLUDED.preactivationDate,
            activationDate = EXCLUDED.activationDate,
            commercialStatus = EXCLUDED.commercialStatus,
            commercialStatusDate = EXCLUDED.commercialStatusDate,
            billingStatus = EXCLUDED.billingStatus,
            billingStatusChangeDate = EXCLUDED.billingStatusChangeDate,
            billingActivationDate = EXCLUDED.billingActivationDate,
            raw = EXCLUDED.raw,
            hash = EXCLUDED.hash
        WHERE objenious_lines.hash IS DISTINCT FROM EXCLUDED.hash
        RETURNING id;
        `;
        const lineHash = this.generateLineHash(data)
        if (lineHash == undefined) {
            // Best-effort: an unhashable line is logged and skipped, not fatal.
            console.error("[x] Ignorando linea ", data)
            return;
        }
        // Order must match the column list / $1..$17 placeholders above.
        const values = [
            data.simId,
            data.iccid,
            data.msisdn,
            data.imei,
            data.imeiChangeDate,
            data.offerCode,
            data.status,
            data.preactivationDate,
            data.activationDate,
            data.commercialStatus,
            data.commercialStatusDate,
            data.billingStatus,
            data.billingStatusChangeDate,
            data.billingActivationDate,
            data.createDate || new Date(), // Defaults to "now" when absent
            JSON.stringify(data.raw), // pg driver needs a string (or plain object) for JSONB
            lineHash
        ];
        let client: PoolClient | undefined = undefined;
        try {
            client = await this.pgClient.connect();
            const res = await client.query<{ id: number }>(query, values);
            return res.rows[0];
        } catch (err) {
            console.error('Error en la inserción:', err);
            throw err;
        } finally {
            // Always return the client to the pool, even on error.
            if (client != undefined) {
                client.release()
            }
        }
    }
}

View File

@@ -5,20 +5,6 @@
"description": "", "description": "",
"main": "index.ts", "main": "index.ts",
"imports": { "imports": {
"#config/*.js": {
"types": "./config/*.ts",
"default": "./config/*.js"
},
"#config/*": {
"types": "./config/*.ts",
"default": "./config/*.js"
},
"#shared/*.js": {
"default": "../sim-shared/*.js"
},
"#shared/*": {
"default": "../sim-shared/*.js"
},
"#adapters/*.js": { "#adapters/*.js": {
"types": "./infrastructure/*.ts", "types": "./infrastructure/*.ts",
"default": "./infrastructure/*.js" "default": "./infrastructure/*.js"
@@ -45,8 +31,8 @@
} }
}, },
"scripts": { "scripts": {
"test": "echo \"Error: no test specified\" && exit 1", "test": "node --import tsx --test ./**/*.test.ts",
"build": "tsc --build && tsc-alias -p tsconfig.json && cp package.json ../../dist/packages/sim-objenious-cron/", "build": "tsc --build && tsc-alias -p tsconfig.json && cp .env package.json ../../dist/packages/sim-objenious-cron/",
"dev": "tsx watch index.ts", "dev": "tsx watch index.ts",
"start": "node ../../dist/packages/sim-objenious-cron/index.js" "start": "node ../../dist/packages/sim-objenious-cron/index.js"
}, },

View File

@@ -1,12 +1,13 @@
import { env } from "#config/env/index.js"; import { env } from "../config/env/index.js";
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"; import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js";
import axios from "axios"; import axios from "axios";
import { IOperationsRepository, Objenious, ObjeniousOperation, ObjeniousOperationChange, StatusEnum } from "sim-shared/domain/operationsRepository.port.js"; import { IOperationsRepository, Objenious, ObjeniousOperation, ObjeniousOperationChange, StatusEnum } from "sim-shared/domain/operationsRepository.port.js";
import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js"; import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js";
import { ObjeniousOperationsRepository } from "packages/sim-shared/infrastructure/ObjeniousOperationRepository.js";
export class CheckObjeniousRequests { export class CheckObjeniousRequests {
constructor( constructor(
private readonly operationsRepository: IOperationsRepository, private readonly operationsRepository: ObjeniousOperationsRepository,
private readonly orderRepository: OrderRepository, private readonly orderRepository: OrderRepository,
private readonly httpClient: HttpClient private readonly httpClient: HttpClient
) { ) {
@@ -16,6 +17,7 @@ export class CheckObjeniousRequests {
* TODO: meter a una funcion a parte task con los 3 pasos * TODO: meter a una funcion a parte task con los 3 pasos
*/ */
public async getPendingOperations() { public async getPendingOperations() {
console.log("[i] Inicio revision de peticiones")
// 1. Se obtienen todas las operaciones pendientes de la BDD // 1. Se obtienen todas las operaciones pendientes de la BDD
const pendingOperations = await this.operationsRepository.getPendingOperations() const pendingOperations = await this.operationsRepository.getPendingOperations()
@@ -49,11 +51,14 @@ export class CheckObjeniousRequests {
console.log("[cron] Solicitando status para", merged.map(e => e.id)) console.log("[cron] Solicitando status para", merged.map(e => e.id))
const result = await this.getMassActionsStatus(merged) const result = await this.getMassActionsStatus(merged)
console.log("[o] Revisión de eventos completa")
} }
/** /**
* Para una lista de operaciones **con mass_action_id** se comprueba si han tenido alguna actualizacion * Para una lista de operaciones **con mass_action_id** se comprueba si han tenido alguna actualizacion
* Devuelve el numero de operaciones comprobadas. * Devuelve el numero de operaciones comprobadas.
* TODO: Esto va en un repositorio
*/ */
private async getMassActionsStatus(requestList: ObjeniousOperation[]) { private async getMassActionsStatus(requestList: ObjeniousOperation[]) {
if (requestList.length == 0) return 0; if (requestList.length == 0) return 0;
@@ -119,9 +124,6 @@ export class CheckObjeniousRequests {
if (uorStatus == "finished") { if (uorStatus == "finished") {
console.log(" ****> Status", uorStatus) console.log(" ****> Status", uorStatus)
if (uorStatus != "finished") {
console.error("!!! Notificando estado no finished")
}
const targetIccids = originalAction.iccids const targetIccids = originalAction.iccids
const lineData = await this.getLineData(targetIccids) const lineData = await this.getLineData(targetIccids)
console.log("[i] lineData", lineData.content[0]) console.log("[i] lineData", lineData.content[0])
@@ -136,7 +138,7 @@ export class CheckObjeniousRequests {
}) })
} }
if (originalAction.operation == "activation") { if (originalAction.operation == "activate") {
this.notifyFinalization({ this.notifyFinalization({
...originalAction, ...originalAction,
msisdn msisdn
@@ -215,7 +217,7 @@ export class CheckObjeniousRequests {
const PATH = "/actions/requests/" const PATH = "/actions/requests/"
const operationsList = structuredClone(requestList) const operationsList = structuredClone(requestList)
// TODO: El for es gigantesco hay que simplificar partes
for (const request of operationsList) { for (const request of operationsList) {
if (request.id == undefined) continue; if (request.id == undefined) continue;
@@ -228,13 +230,50 @@ export class CheckObjeniousRequests {
try { try {
res = await req res = await req
} catch (e) { } catch (e) {
console.error("Error comprobando el estado de ", request, e) console.error("[x] Error comprobando el estado de ", request, e)
//todo actualizar el estado para incluir el error continue;
}
// 2. Casos de error o id no generada
if (res.data.massActionIds.length == 0) {
// Si no hay es que *puede* que haya un problema o no se ha generado todavia
const reports = res.data.actionRequestReports
// Se entiende que no hay report ni id = está a la espera
if (reports.length == 0) continue;
// ! Hay minimo un report -> se considera error y se para
const updateData: ObjeniousOperationChange = {
operation_id: request.id,
new_status: "error",
error: JSON.stringify(reports[0].actionRequestReportDataDTOs)
}
const updateRes = await this.operationsRepository.updateOperation(updateData)
if (updateRes.error != undefined) {
console.error("[x] Error actualizando el estado de la operacion", updateData.error)
}
if (request.correlation_id != undefined) {
this.orderRepository.errorOrder({
correlation_id: request.correlation_id,
status: "failed",
error: "MassId no obtenida",
reason: "MassId no obtenida",
stackTrace: JSON.stringify(reports[0].actionRequestReportDataDTOs)
}).then(e => {
if (e.error != undefined) {
console.error("[x] Error actualizando el estado del Order con correlation_id: ", request.correlation_id)
console.error(e.error)
}
}).catch(e => {
console.error("[x] Error actualizando el estado del Order con correlation_id: ", request.correlation_id)
})
}
continue; continue;
} }
// 2. Modificacion del massId si ha habido un cambio
const massActionId = res.data.massActionIds[0] const massActionId = res.data.massActionIds[0]
// 3. Modificacion del massId si ha habido un cambio
try { try {
if (res.status == 200 && res.data != undefined && massActionId != undefined) { if (res.status == 200 && res.data != undefined && massActionId != undefined) {
const updateData: ObjeniousOperationChange = { const updateData: ObjeniousOperationChange = {
@@ -248,7 +287,7 @@ export class CheckObjeniousRequests {
request.mass_action_id = String(massActionId) request.mass_action_id = String(massActionId)
} }
} catch (e) { } catch (e) {
console.log("Error actualizando el estado de ", request) console.log("[x] Error actualizando el estado de ", request)
continue; continue;
} }
} }
@@ -262,6 +301,8 @@ export class CheckObjeniousRequests {
* al servicio que manda los mails * al servicio que manda los mails
*/ */
private async notifyFinalization(operation: ObjeniousOperation & { msisdn: string }) { private async notifyFinalization(operation: ObjeniousOperation & { msisdn: string }) {
console.log("[i] Enviando activacion a", env.NOTIFICATION_URL)
console.log("[i] Operation", operation)
const req = axios.post(env.NOTIFICATION_URL, { const req = axios.post(env.NOTIFICATION_URL, {
...operation, ...operation,
iccids: [operation.iccids] iccids: [operation.iccids]
@@ -270,7 +311,17 @@ export class CheckObjeniousRequests {
"x-apikey-sim-activation": env.SIM_ACTIVATION_API_KEY "x-apikey-sim-activation": env.SIM_ACTIVATION_API_KEY
} }
}) })
await req try {
const res = await req
if (res.status != 200) {
console.error("[x] Error enviando el mail de confirmacion para ", operation, " status ", res.status, res.statusText)
}
} catch (e) {
console.error("[x] Error enviando el mail de confirmacion para ", operation)
console.error(e)
}
} }
} }

View File

@@ -0,0 +1,189 @@
import { ObjeniousLine } from "sim-shared/domain/objeniousLine.js";
import { PauseCancelTaskRepository } from "sim-consumidor-objenious/infrastructure/PauseCancelTaskRepository.js";
import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js";
import { SimUseCases } from "sim-consumidor-objenious/aplication/Sim.usecases.js";
import { OrderRepository } from "packages/sim-shared/infrastructure/OrderRepository.js";
// Console wrapper that tags every message with this task's name.
const logger = {
    log(...args: any[]) {
        console.log("[i] [TaskPauseTerminate]", ...args)
    },
    error(...args: any[]) {
        console.error("[x] [TaskPauseTerminate] ", ...args)
    },
}
/**
 * Periodic task that resolves queued pause/terminate requests: lines still in
 * billing status TEST are re-scheduled for tomorrow; lines that left TEST get
 * their suspend/terminate use case executed and the task + order closed.
 */
export class PauseTerminateTask {
    constructor(
        private readonly objeniousRepo: ObjeniousOperationsRepository,
        private readonly pauseRepo: PauseCancelTaskRepository,
        private readonly simUsecases: SimUseCases,
        private readonly orderRepo: OrderRepository
    ) {
    }

    /**
     * Runs one full check cycle.
     * Returns 0 on success (or nothing to do) and 1 on failure.
     */
    public async run() {
        const finError = (err: any) => {
            logger.error("Finalizado con errores proceso de comprobacion de lineas en pausa o canceladas")
            logger.error(err)
        }
        const finExito = () => {
            logger.log("Finalizado con exito proceso de comprobacion de lineas en pausa o canceladas")
        }
        try {
            logger.log("Iniciando proceso de comprobacion de lineas en pausa o canceladas")
            // 1. How many pending requests need review
            const peticionesRevisar = await this.pauseRepo.getPending()
            if (peticionesRevisar.error != undefined) {
                finError(peticionesRevisar.error)
                return 1;
            }
            logger.log(`Se van a revisar ${peticionesRevisar.data?.length} peticiones`)
            if (peticionesRevisar.data == undefined || peticionesRevisar.data.length == 0) {
                finExito()
                return 0;
            }

            // 2. Fetch the current state of every involved line from the API.
            // Single loop over the paged generator: the original pushed the
            // first page twice (once before the while and again inside it).
            const iccids = peticionesRevisar.data.map(e => e.iccid)
            const lineasActualizadas: ObjeniousLine[] = []
            const lineGenerator = this.objeniousRepo.getLinesByStatusAPI({
                iccids: iccids
            })
            let lines = await lineGenerator.next()
            while (true) {
                // Exhausted generator may yield { done: true, value: undefined }
                if (lines.value == undefined) break;
                if (lines.value.error != undefined || lines.value.data == undefined) {
                    logger.error("Error cargando las lineas", lines.value.error)
                    finError(lines.value.error)
                    return 1;
                }
                lineasActualizadas.push(...lines.value.data)
                if (lines.done) break;
                lines = await lineGenerator.next()
            }
            console.log("Cargado: ", lineasActualizadas)

            // 3. Split lines that must be acted on (left TEST) from those that
            //    must be re-queued (still TEST)
            const lineasNoTest = lineasActualizadas.filter(e => e.status.billingStatus != "TEST")
            const lineasTest = lineasActualizadas.filter(e => e.status.billingStatus == "TEST")

            // 4-5. Re-queue TEST lines: next retry in 1 day
            const proximoReintento = new Date()
            proximoReintento.setDate(new Date().getDate() + 1)
            for (const linea of lineasTest) {
                const tareaId = peticionesRevisar.data
                    .find(e => e.iccid == linea.identifier.iccid)?.id
                if (tareaId == undefined) continue; // May be a problem if this gets generalized
                // Awaited now — the original fired-and-forgot this update.
                await this.pauseRepo.updateTask({
                    id: tareaId,
                    next_check: proximoReintento
                })
            }

            // 6. Definitive pause/cancel operations
            for (const linea of lineasNoTest) {
                const operacion = peticionesRevisar.data
                    .find(e => e.iccid == linea.identifier.iccid)
                if (operacion == undefined) continue;
                const dueDate = new Date()
                dueDate.setMinutes(new Date().getMinutes() + 15)
                const operacionTipo = operacion.operation_type
                const actionData = operacion.action_data
                const correlation_id = operacion.action_data.correlation_id
                actionData.dueDate = dueDate.toISOString()
                switch (linea.status.billingStatus) {
                    case "ACTIVATED": {
                        // IMPORTANT: the due date must be checked — TODO confirm
                        let result = null;
                        switch (operacionTipo) {
                            case "suspend":
                                result = await this.simUsecases.suspend(actionData)()
                                break;
                            case "terminate":
                                result = await this.simUsecases.terminate(actionData)()
                                break;
                            default:
                                break;
                        }
                        if (result == undefined) {
                            logger.error("Operacion desconocida", operacion)
                        } else if (result?.error != undefined) {
                            // Use case failed: close the task with the error and DLX the order
                            logger.error(result.error)
                            await this.pauseRepo.finishTask({
                                id: operacion.id,
                                error: result.error
                            })
                            if (correlation_id != undefined)
                                await this.orderRepo.errorOrder({
                                    correlation_id: correlation_id,
                                    status: "dlx",
                                    reason: result.error
                                })
                        } else {
                            // ok
                            await this.pauseRepo.finishTask({ id: operacion.id })
                            if (correlation_id != undefined)
                                await this.orderRepo.finishOrder({ correlation_id })
                        }
                        break;
                    }
                    case "CANCELED":
                        // Already cancelled upstream: nothing to execute, just close out
                        await this.pauseRepo.finishTask({
                            id: operacion.id,
                            error: "billingStatus is CANCELED"
                        })
                        if (correlation_id != undefined)
                            await this.orderRepo.finishOrder({ correlation_id })
                        break;
                    case "SUSPENDED":
                        await this.pauseRepo.finishTask({
                            id: operacion.id,
                            error: "billingStatus is SUSPENDED"
                        })
                        if (correlation_id != undefined)
                            await this.orderRepo.finishOrder({ correlation_id })
                        break;
                    case "TEST":
                        // Unreachable: TEST lines were filtered out above
                    default:
                        logger.error("billingStatus desconocido", linea.status.billingStatus)
                }
            }
            finExito()
        } catch (e) {
            finError(e)
            // The original returned 0 here, masking failures from the caller.
            return 1
        }
        return 0
    }
}

View File

@@ -0,0 +1,54 @@
import { lineToCreateLineDto, ObjeniousLine } from "sim-shared/domain/objeniousLine.js";
import { ObjeniousLinesRepository } from "../infranstructure/ObjeniousLinesRepository.js";
import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js";
/**
 * Task that dumps every Objenious line (paged through the status API) into
 * the local objenious_lines table via insert-or-update.
 */
export class TaskVolcadoLineas {
    constructor(
        private readonly linesRepository: ObjeniousLinesRepository,
        private readonly objeniousRepository: ObjeniousOperationsRepository
    ) {
    }

    /**
     * Persists one page of lines sequentially.
     * Returns the ids of the rows actually inserted/updated (the original
     * accumulated them but never returned the array).
     */
    private async saveLines(lines: ObjeniousLine[]) {
        const linesToCreate = lines.map(lineToCreateLineDto)
        const created: number[] = []
        for (const line of linesToCreate) {
            // If this turns out to be slow, switch to Promise.all
            const res = await this.linesRepository.insertOrUpdate(line)
            if (res?.id != undefined)
                created.push(res.id)
        }
        return created
    }

    /**
     * Pages through all lines and saves each page. Aborts (with a log) on the
     * first page that comes back with an error.
     * Loads pages into memory; memory use should be watched.
     */
    public async loadLines() {
        console.log("[i] Iniciando task de volcado de lineas de Objenious")
        const linesIterator = this.objeniousRepository.getLinesByStatusAPI({
            pageSize: 100
        })
        let lines = await linesIterator.next()
        while (true) {
            // Guard: an exhausted generator yields { done: true, value: undefined },
            // which made the original crash on `lines.value.error`.
            if (lines.value == undefined) break;
            if (lines.value.error != undefined || lines.value.data == undefined) {
                console.error("[x] Error cargando las lineas a volcar", lines.value.error)
                return;
            }
            await this.saveLines(lines.value.data)
            if (lines.done) break;
            lines = await linesIterator.next()
        }
        console.log("[i] Terminado task de volcado de lineas de Objenious")
    }
}

View File

@@ -1,3 +1,5 @@
import { Result } from "../domain/Result.js"
export type Validator<T extends Object> = { export type Validator<T extends Object> = {
field: keyof T, field: keyof T,
errorMsg: string, errorMsg: string,
@@ -16,10 +18,18 @@ export class BodyValidator<T extends Object> {
this.validatorList = validators this.validatorList = validators
} }
public validate(obj: T) { public validate(obj: T): Result<{ msg: string, field: string }, boolean> {
for (const validator of this.validatorList) { for (const validator of this.validatorList) {
if (validator.validationFunc(obj) == false) throw new Error(validator.errorMsg) if (validator.validationFunc(obj) == false)
return {
error: {
msg: validator.errorMsg,
field: String(validator.field)
}
}
} }
return true; return {
data: true
};
} }
} }

View File

@@ -1,16 +1,16 @@
import { test, describe } from "vitest" import { test, describe } from "vitest"
import { JWTService } from "./JWT.service.js" import { jwtService } from "../config/jwtService.config.js"
describe("Tokens Objenious", () => { describe("Tokens Objenious", () => {
const jwtService = new JWTService() const jwt = jwtService
test("Solicicitud normal de auth", async () => { test("Solicicitud normal de auth", async () => {
const token = await jwtService.getAccessToken() const token = await jwt.getAccessToken()
console.log("acceso objenious", token) console.log("acceso objenious", token)
}), }),
test("Solicicitud de refresh de auth", async () => { test("Solicicitud de refresh de auth", async () => {
const token = await jwtService.tryRefreshToken() const token = await jwt.tryRefreshToken()
console.log("acceso refresh objenious", token) console.log("acceso refresh objenious", token)
}) })
}) })

View File

@@ -4,24 +4,24 @@
* el cliente HTTP * el cliente HTTP
*/ */
import { env } from "#config/env/index.js";
import fs from "fs" import fs from "fs"
import { import {
JWTToken, JWTToken,
JWTHeader, JWTHeader,
IJWTService IJWTService,
JWTPayload
} from "sim-shared/domain/JWT.js" } from "sim-shared/domain/JWT.js"
import axios, { AxiosError } from "axios"; import axios, { AxiosError } from "axios";
type GrantAccessRequestBody = { export type GrantAccessRequestBody = {
grant_type: string, grant_type: string,
client_id: string, client_id: string,
client_assertion_type: string, client_assertion_type: string,
client_assertion: string client_assertion: string
} }
type TokensRequestResponse = { export type TokensRequestResponse = {
"access_token": string, "access_token": string,
"expires_in": number, "expires_in": number,
"refresh_token": string "refresh_token": string
@@ -32,41 +32,6 @@ type TokensRequestResponse = {
"scope": string "scope": string
} }
const PRIVATE_KEY_PATH = env.OBJ_PEM_PATH
const GET_TOKEN_URL = "https://idp.docapost.io/auth/realms/GETWAY/protocol/openid-connect/token"
const REFRESH_TOKEN_URL = GET_TOKEN_URL
const DEFAULT_BODY: GrantAccessRequestBody = {
grant_type: "client_credentials",
client_id: env.OBJ_CLIENT_ID,
client_assertion_type: "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
client_assertion: env.OBJ_CLI_ASSERTION
}
const REFRESH_BODY = {
...DEFAULT_BODY,
grant_type: "refresh_token",
}
const DEFAULT_HEADERS = {
"content-type": "application/x-www-form-urlencoded"
}
function addIATHeaders(authHeaders: Object) {
const headers = <JWTHeader>{
...authHeaders,
sub: env.OBJ_CLIENT_ID,
iss: env.OBJ_CLIENT_ID,
aud: GET_TOKEN_URL,
jti: Date.now().toString(),
iat: Math.floor(Date.now() / 1000),
exp: Math.floor(Date.now() / 1000) + 5 * 60,
}
return headers
}
export type ObjeniousTokenBody = any export type ObjeniousTokenBody = any
/** /**
@@ -82,27 +47,54 @@ export class JWTService implements IJWTService<ObjeniousTokenBody> {
public authToken: JWTToken<ObjeniousTokenBody> | undefined; public authToken: JWTToken<ObjeniousTokenBody> | undefined;
private refreshToken?: JWTToken<ObjeniousTokenBody> | undefined; private refreshToken?: JWTToken<ObjeniousTokenBody> | undefined;
constructor(args?: { // http
private transformHeaders?: (_: Object) => JWTHeader;
private defaultHttpHeaders: Record<string, string>;
private defaultBody: Record<string, string>;
// jwt
private defaultJWTHeaders: JWTHeader;
private defaultJWTPayload: JWTPayload<any>;
private privateKeyPath: string;
private tokenUrl: string;
private refreshTokenUrl: string;
constructor(args: {
token?: string // si se partiese de un token existente, token?: string // si se partiese de un token existente,
refreshToken?: string refreshToken?: string,
transformJWTHeaders?: (_: Object) => JWTHeader,
defaultHeaders: Record<string, string>,
defaultBody: Record<string, string>,
defaultJWTHeaders: JWTHeader,
defaultJWTPayload: JWTPayload<any>,
privateKeyPath: string,
tokenUrl: string,
refreshTokenUrl: string
}) { }) {
if (args?.token != undefined) this.authToken = new JWTToken(args.token) if (args?.token != undefined) this.authToken = new JWTToken(args.token)
if (args?.refreshToken != undefined) this.refreshToken = new JWTToken(args.refreshToken) if (args?.refreshToken != undefined) this.refreshToken = new JWTToken(args.refreshToken)
if (args?.transformJWTHeaders != undefined) this.transformHeaders = args.transformJWTHeaders
this.defaultHttpHeaders = args.defaultHeaders;
this.defaultBody = args.defaultBody;
this.defaultJWTHeaders = args.defaultJWTHeaders;
this.defaultJWTPayload = args.defaultJWTPayload;
this.privateKeyPath = args.privateKeyPath;
this.tokenUrl = args.tokenUrl;
this.refreshTokenUrl = args.refreshTokenUrl;
} }
private buildJwtBody() { private buildJwtBody() {
const jwtHeaders = { const jwtHeaders = this.defaultJWTHeaders
alg: "RS256",
typ: "JWT", const jwtData = (this.transformHeaders) ?
kid: env.OBJ_KID this.transformHeaders(this.defaultJWTPayload) :
} this.defaultJWTPayload;
const jwtData = addIATHeaders({
sub: env.OBJ_CLIENT_ID, const key = fs.readFileSync(this.privateKeyPath, "utf8")
iss: env.OBJ_CLIENT_ID,
aud: "https://idp.docapost.io/auth/realms/GETWAY",
jti: Date.now().toString(),
})
const key = fs.readFileSync(PRIVATE_KEY_PATH, "utf8")
const token = JWTToken.fromParts({ const token = JWTToken.fromParts({
header: jwtHeaders, header: jwtHeaders,
payload: jwtData, payload: jwtData,
@@ -116,14 +108,16 @@ export class JWTService implements IJWTService<ObjeniousTokenBody> {
public async getNewAuthToken() { public async getNewAuthToken() {
const bodyWithtoken = { const bodyWithtoken = {
...DEFAULT_BODY, ...this.defaultBody,
client_assertion: this.buildJwtBody() client_assertion: this.buildJwtBody()
} }
const req = axios.post(GET_TOKEN_URL, const headers = (this.transformHeaders) ? this.transformHeaders(this.defaultHttpHeaders) : this.defaultHttpHeaders;
const req = axios.post(this.tokenUrl,
bodyWithtoken, bodyWithtoken,
{ {
headers: addIATHeaders(DEFAULT_HEADERS) headers: headers
} }
) )
@@ -166,16 +160,21 @@ export class JWTService implements IJWTService<ObjeniousTokenBody> {
if (this.refreshToken == undefined) throw new Error("El refreshToken no está definido") if (this.refreshToken == undefined) throw new Error("El refreshToken no está definido")
if (this.refreshToken.isExpired()) throw new Error("El refreshToken ha expirado") if (this.refreshToken.isExpired()) throw new Error("El refreshToken ha expirado")
const refreshBody = {
...this.defaultBody,
grant_type: "refresh_token",
}
const body = { const body = {
...REFRESH_BODY, ...refreshBody,
client_assertion: this.buildJwtBody(), client_assertion: this.buildJwtBody(),
refresh_token: this.refreshToken.rawToken refresh_token: this.refreshToken.rawToken
} }
const req = axios.post(REFRESH_TOKEN_URL, const req = axios.post(this.refreshTokenUrl,
body, body,
{ {
headers: DEFAULT_HEADERS headers: this.defaultHttpHeaders
} }
) )

View File

@@ -7,9 +7,12 @@
import { env, loadEnvFile } from "node:process"; import { env, loadEnvFile } from "node:process";
import { Pool } from "pg"; import { Pool } from "pg";
import { PgClient } from "../infrastructure/PgClient.js"; import { PgClient } from "../infrastructure/PgClient.js";
import { HttpClient } from "../infrastructure/HTTPClient.js";
import { jwtService } from "./jwtService.config.js";
console.warn("[i!] Se está corriendo codigo de test") console.warn("[i!] Se está corriendo codigo de test")
loadEnvFile("../../.env") // Global loadEnvFile("../../.env") // Global
loadEnvFile("./test.env") // Local
// se hace una por servicio. // se hace una por servicio.
export const pgPool = new Pool({ export const pgPool = new Pool({
@@ -24,4 +27,14 @@ export const postgresClient = new PgClient({
pool: pgPool pool: pgPool
}) })
const OBJ_BASE_URL = "https://api-getway.objenious.com/ws"
export const httpObjClient = new HttpClient({
baseURL: OBJ_BASE_URL,
headers: {
"content-type": " application/json; charset=utf-8"
},
jwtManager: jwtService
})
console.warn(`[T] TEST DB : ${env.POSTGRES_DATABASE}@${env.POSTGRES_HOST}`) console.warn(`[T] TEST DB : ${env.POSTGRES_DATABASE}@${env.POSTGRES_HOST}`)

View File

@@ -0,0 +1,67 @@
import assert from "assert"
import { env, loadEnvFile } from "process"
import { GrantAccessRequestBody, JWTService } from "sim-shared/aplication/JWT.service.js"
import { JWTHeader } from "sim-shared/domain/JWT.js"

loadEnvFile("../../.env") // Global
loadEnvFile("./test.env") // Local

// Fail fast when any credential needed to build the client-assertion JWT is missing.
assert(env.OBJ_CLIENT_ID != undefined)
assert(env.OBJ_CLI_ASSERTION != undefined)
assert(env.OBJ_PEM_PATH != undefined)
// Also required: the original omitted this check although kid is used below.
assert(env.OBJ_KID != undefined)

const PRIVATE_KEY_PATH = env.OBJ_PEM_PATH
const GET_TOKEN_URL = "https://idp.docapost.io/auth/realms/GETWAY/protocol/openid-connect/token"
// Docapost serves grants and refreshes from the same endpoint.
const REFRESH_TOKEN_URL = GET_TOKEN_URL

// Body for the client_credentials grant (signed assertion added per request).
const DEFAULT_BODY: GrantAccessRequestBody = {
    grant_type: "client_credentials",
    client_id: env.OBJ_CLIENT_ID,
    client_assertion_type: "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
    client_assertion: env.OBJ_CLI_ASSERTION
}
const DEFAULT_HEADERS = {
    "content-type": "application/x-www-form-urlencoded"
}
// JOSE header: RS256 signature, key id taken from the environment.
const DEFAULT_HEADERS_JWT = {
    alg: "RS256",
    typ: "JWT",
    kid: env.OBJ_KID,
}
// Static JWT claims; time-dependent ones are injected by addIATHeaders.
const DEFAULT_DATA_JWT = {
    sub: env.OBJ_CLIENT_ID,
    iss: env.OBJ_CLIENT_ID,
    aud: "https://idp.docapost.io/auth/realms/GETWAY",
    jti: Date.now().toString(),
}

/**
 * Adds per-request claims (fresh jti, iat, 5-minute exp) on top of the given
 * headers/claims object. Passed to JWTService as the transform hook.
 */
function addIATHeaders(authHeaders: Object) {
    const headers = <JWTHeader>{
        ...authHeaders,
        sub: env.OBJ_CLIENT_ID,
        iss: env.OBJ_CLIENT_ID,
        aud: GET_TOKEN_URL,
        jti: Date.now().toString(),
        iat: Math.floor(Date.now() / 1000),
        exp: Math.floor(Date.now() / 1000) + 5 * 60,
    }
    return headers
}

// Singleton JWT service configured for the Objenious/Docapost IdP.
export const jwtService = new JWTService({
    transformJWTHeaders: addIATHeaders,
    defaultHeaders: DEFAULT_HEADERS,
    defaultBody: DEFAULT_BODY,
    defaultJWTHeaders: DEFAULT_HEADERS_JWT,
    defaultJWTPayload: DEFAULT_DATA_JWT,
    privateKeyPath: PRIVATE_KEY_PATH,
    tokenUrl: GET_TOKEN_URL,
    refreshTokenUrl: REFRESH_TOKEN_URL
})

View File

@@ -62,11 +62,14 @@ export type CreateOrderDTO = Pick<
'correlation_id' | 'exchange' | 'routing_key' | 'order_type' | 'payload' | 'webhook_host' | 'webhook_endpoint' 'correlation_id' | 'exchange' | 'routing_key' | 'order_type' | 'payload' | 'webhook_host' | 'webhook_endpoint'
>; >;
export type UpdateOrderDTO = type IdOrCorrelationID =
( (
{ id: number, correlation_id?: never } | { id: number, correlation_id?: never } |
{ id?: never, correlation_id: string } { id?: never, correlation_id: string }
) )
export type UpdateOrderDTO =
IdOrCorrelationID
& &
{ {
new_status: OrderStatus, new_status: OrderStatus,
@@ -74,12 +77,20 @@ export type UpdateOrderDTO =
} }
export type FinishOrderDTO = export type FinishOrderDTO =
( IdOrCorrelationID
{ id: number, correlation_id?: never } |
{ id?: never, correlation_id: string }
)
& &
{ {
reason?: string reason?: string
} }
export type ErrorOrderDTO =
IdOrCorrelationID
&
{
status: "failed" | "dlx",
reason: string,
error?: string,
stackTrace?: string
}

View File

@@ -1,14 +1,30 @@
export type Success<D> = {
error?: undefined | null,
data: D
}
export type Failure<E = Error> = {
data?: undefined | null,
error: E
}
/** /**
* Result<Error,Data> * Result<Error,Data>
*/ */
export type Result<E, D> = export type Result<E, D> = Failure<E> | Success<D>
{
error: E, export async function tryCatch<T>(func: Promise<T>): Promise<Result<Error, T>> {
data?: undefined try {
} const res = await func;
| return {
{ data: res
error?: undefined, }
data: D } catch (e: unknown) {
} return {
error: e as Error
}
}
}

View File

@@ -0,0 +1,144 @@
export type ObjeniousLineResponse = {
content: ObjeniousLine[],
offset: number,
pageNumber: number,
pageSize: number,
paged: boolean,
totalPages: number,
totalElements: number
}
export type ObjeniousLine = {
identifier: {
simId: number,
iccid: string,
imsi: string,
msisdn: string,
amsisdn?: string,
imei: string
},
simCardType: {
code: string,
description: string
},
device: {
imei: string,
imeiChangeDate: string, //Fecha iso
deviceReference?: string | null,
manufacturer?: string | null,
},
customerAccount: {
code: string,
label: string,
address: {
address1: string,
address2: string,
address3: string,
zipCode: string,
city: string,
country: string,
state?: string | null
}
},
offer: {
code: string,
description: string,
},
party: {
name: string,
code: string,
contractReference: string,
partyType: string,
},
lineCustomFields: {
custom1: {
label: string | null,
value: string | null
},
custom2: {
label: string | null,
value: string | null
},
custom3: {
label: string | null,
value: string | null
},
custom4: {
label: string | null,
value: string | null
},
custom5: {
label: string | null,
value: string | null
},
custom6: {
label: string | null,
value: string | null
}
},
status: {
status: string,
preactivationDate: string | null, //"2026-03-17",
activationDate: string | null, //"2026-03-17T11:04:11.408+00:00",
commercialStatus: string, //"test",
commercialStatusDate: string, //"2026-03-17T11:41:01.493+00:00",
networkStatus: string, // "ACTIVATED",
billingStatus: "ACTIVATED" | "SUSPENDED" | "CANCELED" | "TEST",
billingStatusChangeDate: string | null, // "2026-03-17T11:01:00.276+00:00",
billingActivationDate: string | null //,
createdDate: string | null,//"2026-01-30T01:50:02.060+00:00"
},
services: string | null
};
export type ObjeniousLineDb = {
id: number;
simId?: number;
iccid: string;
msisdn?: string;
imei?: string;
imeiChangeDate?: Date;
offerCode?: string;
status?: string;
preactivationDate?: Date | null;
activationDate?: Date | null;
commercialStatus?: string;
commercialStatusDate?: Date | null;
billingStatus?: string;
billingStatusChangeDate?: Date | null;
billingActivationDate?: Date | null;
createDate?: Date | null;
raw: ObjeniousLine;
}
// DTO para inserción (omite el ID autogenerado)
export type CreateObjeniousLineDTO = Omit<ObjeniousLineDb, 'id'>;
export function lineToCreateLineDto(line: ObjeniousLine): CreateObjeniousLineDTO {
const dateOrNull = (data: string | null) => {
if (data == null) return null;
return new Date(data)
}
const transformed: CreateObjeniousLineDTO = {
simId: line.identifier.simId,
iccid: line.identifier.iccid,
msisdn: line.identifier.msisdn,
imei: line.identifier.imei,
imeiChangeDate: new Date(line.device.imeiChangeDate),
offerCode: line.offer.code,
status: line.status.status,
preactivationDate: dateOrNull(line.status.preactivationDate),
activationDate: dateOrNull(line.status.activationDate),
commercialStatus: line.status.commercialStatus,
commercialStatusDate: dateOrNull(line.status.commercialStatusDate),
billingStatus: line.status.billingStatus,
billingStatusChangeDate: dateOrNull(line.status.activationDate),
billingActivationDate: dateOrNull(line.status.activationDate),
createDate: dateOrNull(line.status.activationDate),
raw: line
}
return transformed;
}

View File

@@ -12,7 +12,7 @@ export type ObjeniousOperation = {
id?: number; id?: number;
/** Uuid del mensaje asociado a la operacion */ /** Uuid del mensaje asociado a la operacion */
correlation_id?: string; correlation_id?: string;
operation: string; operation: "activate" | string; // TODO: completar y actualizar
retry_count?: number; retry_count?: number;
max_retry?: number; max_retry?: number;
max_date_retry?: string | null; max_date_retry?: string | null;
@@ -46,10 +46,34 @@ export namespace Objenious {
created: string, created: string,
status: "NEW" | "RUNNING" | "OK" | "KO" | "REPLAYED" | "CANCELLED" | "CLOSED" | "DISABLED", status: "NEW" | "RUNNING" | "OK" | "KO" | "REPLAYED" | "CANCELLED" | "CLOSED" | "DISABLED",
statusDate: string, statusDate: string,
actionType: "PREACTIVATION_AND_ACTIVATION" | string, // todo: añadir el resto actionType: ActionType
massActionIds: number[] massActionIds: number[],
actionRequestReports:
{
requestId: string,
actionRequestReportDataDTOs: [
{
data: string,
newData: string | null,
iccid: string,
dataStatus: DataStatus
}
]
}[],
} }
export type DataStatus = "DATA_INVALID_FORMAT" | "DATA_NOT_FOUND" | "DATA_NOT_ACTIVATED" | "SERVICE_DATA_NOT_ACTIVATED" |
"DATA_WRONG_STATUS" | "DATA_NOT_AUTHORIZED" | "DATA_CUSTOMER_ACCOUNT_NOT_AUTHORIZED" | "DATA_AMBIGUOUS" |
"NEW_DATA_INVALID_FORMAT" | "NEW_DATA_ALREADY_EXISTS" | "DUPLICATE_DATA" | "DATA_TERMINATION_VALIDATED" |
"DATA_TERMINATION_SECURISED" | "MAX_ALARM_INSTANCE" | "MAX_ALARM_INSTANCE_TO_CATCH_UP" |
"ACTIVATED_LINE_CANNOT_BE_TRANSFERED" | "ESIM_WRONG_STEP" | "ESIM_WRONG_PAIRED_VALUE" |
"ESIM_WRONG_DOWNLOAD_STATE" | "ESIM_WRONG_STATUS" | "ESIM_WRONG_FAMILY" | "ESIM_WRONG_CATEGORY" |
"ENTITY_STATUS_NOT_AUTHORIZED" | "LONG_LIFE_NOT_ALLOWED" | "RCARD_NOT_COMPATIBLE" | "APN_NOT_FOUND" |
"APN_OR_DNN_NOT_FOUND" | "APN_CONFIGURATION_NOT_FOUND" | "APN_CONFIGURATION_INVALID_PARAMETER_FILE" |
"IP_NOT_AVAILABLE" | "RADIUS_FIELD_LENGTH_NOT_ALLOWED" | "RADIUS_LOGIN_OR_PASSWORD_NOT_FOUND" | "RADIUS_PASSWORD_NOT_ALLOWED" |
"RADIUS_LOGIN_NOT_ALLOWED" | "NETWORK_NOT_ACTIVATED" | "CHANGE_CUSTOMER_ACCOUNT_NOT_AllOWED" | "CHANGE_OFFER_NOT_ALLOWED" |
"SIM_NOT_EUICC" | "OFFER_NOT_WSF_PALIER_FLOTTE_FR"
export type ActionType = "PREACTIVATION" | "PREACTIVATION_ACTIVATION" | "ACTIVATION" | export type ActionType = "PREACTIVATION" | "PREACTIVATION_ACTIVATION" | "ACTIVATION" |
"STATUS_CHANGE" | "ICCID_CHANGE" | "EUICC_NOTIFICATION" "STATUS_CHANGE" | "ICCID_CHANGE" | "EUICC_NOTIFICATION"
| "EUICC_AUDIT" | "MSISDN_CHANGE" | "ALARM_SETTING" | "EUICC_AUDIT" | "MSISDN_CHANGE" | "ALARM_SETTING"

View File

@@ -0,0 +1,14 @@
import { describe, it } from "node:test";
import { ObjeniousOperationsRepository } from "./ObjeniousOperationRepository.js";
import { httpObjClient, postgresClient } from "../config/config.test.js";
describe("[Integration] Test API requests", () => {
const repository = new ObjeniousOperationsRepository(
httpObjClient,
postgresClient
)
it("Read /lines with multiple iccids", () => {
})
})

View File

@@ -1,14 +1,139 @@
import { IOperationsRepository, ObjeniousOperation, ObjeniousOperationChange } from "sim-shared/domain/operationsRepository.port.js"; import { IOperationsRepository, ObjeniousOperation, ObjeniousOperationChange } from "sim-shared/domain/operationsRepository.port.js";
import { Result } from "sim-shared/domain/Result.js"; import { Result, tryCatch } from "sim-shared/domain/Result.js";
import { PgClient } from "sim-shared/infrastructure/PgClient.js"; import { PgClient } from "sim-shared/infrastructure/PgClient.js";
import { ObjeniousLine, ObjeniousLineResponse } from "../domain/objeniousLine.js";
import { HttpClient } from "./HTTPClient.js";
import assert from "node:assert";
import { AxiosResponse } from "axios";
export class ObjeniousOperationsRepository implements IOperationsRepository { export class ObjeniousOperationsRepository implements IOperationsRepository {
constructor( constructor(
private http: HttpClient,
private readonly pgClient: PgClient private readonly pgClient: PgClient
) { ) {
} }
/**
* Consulta el estado de una o mas lineas directamente a la API de Objenious
* TODO: No hay paginacion como en getLinesByStatusAPI
*/
public async getLinesAPI(
identifierType: "ICCID" | "IMSI" | "IMEI" | "MSISDN" | "REFERENCE",
identifiers: string[]
): Promise<Result<string, ObjeniousLine[]>> {
if (identifiers.length == 0) {
return {
data: []
}
}
// Comprobar < MAX_PAGE_SIZE (Poco probable)
const path = "/lines"
const params = {
"identifier.identifierType": identifierType,
"identifier.identifiers": identifiers.toString()
}
const req = this.http.client.get<ObjeniousLineResponse>(path, {
params: params
})
const res = await tryCatch(req)
if (res.error != undefined) {
return {
error: res.error?.message
}
}
const lines = res.data.data.content
return {
data: lines
}
}
private MAX_PAGE_SIZE = 1000
public async * getLinesByStatusAPI(args?: {
pageSize?: number,
pageNumber?: number,
status?: string,
iccids?: string[]
}): AsyncGenerator<Result<string, ObjeniousLine[]>, Result<string, ObjeniousLine[]>, any> {
const path = "/lines"
const pageSize = args?.pageSize ?? this.MAX_PAGE_SIZE;
let currentPage = args?.pageNumber ?? 0;
let totalPages: number | undefined = undefined; // Como limite de paginas, igual es pasarse pero hasta que se lea
const params: Record<string, string | number> = {}
// Si se va a filtrar por iccids especificamente, en un futuro habra que ampliar el tipo de filtros
if (args?.iccids != undefined) {
params["identifier.identifierType"] = "ICCID"
params["identifier.identifiers"] = args.iccids.toString()
}
const loadNextLine = async (page: number): Promise<Result<string, ObjeniousLine[]>> => {
if (args?.status != undefined) params["simStatus"] = args.status
params["pageSize"] = pageSize
params["pageNumber"] = page
console.log(`[i] Cargando pagina ${currentPage} de ${totalPages ?? "(desc)"}`)
const nextPage = await tryCatch<AxiosResponse<ObjeniousLineResponse>>(this.http.client.get(path, {
params: params
}))
if (nextPage.error != undefined) {
console.error(nextPage.error)
return {
error: nextPage.error.message
}
}
// Se aumenta para la siguiente ejecucion
console.log(`[i] Página ${currentPage} completa, total: ${nextPage.data.data.totalPages}`)
totalPages = nextPage.data.data.totalPages
return {
data: nextPage.data.data.content
}
}
// El inicio se ejecuta siempre
const lines = await loadNextLine(currentPage)
if (lines.error != undefined) {
console.error("[x] Error obteniendo las lineas, cancelando operación");
return {
error: "Error cargando lineas"
}
}
currentPage++;
yield {
data: lines.data
}
// Copia para evitar bucles infinitos por error de la api
const maxPages = totalPages
assert.ok(maxPages != undefined, "No se ha defindo el numero de paginas") // Nunca deberia pasar pero así se evitan bucles infnitos
console.log("maxPages", maxPages)
for (let i = currentPage; i < maxPages!; i++) {
console.log("Bucle i:", i, "page: ", currentPage)
yield await loadNextLine(currentPage);
currentPage++;
}
return {
data: []
}
}
async createOperation(data: ObjeniousOperation): Promise<Result<string, ObjeniousOperation>> { async createOperation(data: ObjeniousOperation): Promise<Result<string, ObjeniousOperation>> {
const query = ` const query = `
INSERT INTO objenious_operation (operation, iccids, status, max_retry, request_id) INSERT INTO objenious_operation (operation, iccids, status, max_retry, request_id)
@@ -46,7 +171,7 @@ export class ObjeniousOperationsRepository implements IOperationsRepository {
request_id = COALESCE($4, request_id), request_id = COALESCE($4, request_id),
mass_action_id = COALESCE($5, mass_action_id), mass_action_id = COALESCE($5, mass_action_id),
last_change_date = now() at time zone 'utc', last_change_date = now() at time zone 'utc',
end_date = CASE WHEN $2 IN ('finished') THEN now() at time zone 'utc' ELSE end_date END, end_date = CASE WHEN $2 IN ('finished','error') THEN now() at time zone 'utc' ELSE end_date END,
objenious_status = $6 objenious_status = $6
WHERE id = $1`; WHERE id = $1`;

View File

@@ -27,7 +27,7 @@ describe("Test OrderRepository", {}, (ctx) => {
before(async () => { before(async () => {
// Order1 // Order1
const result1 = await orderRepo.createOrder(order1) const result1 = await orderRepo.createOrder(order1)
assert(result1.data != undefined) assert.ok(result1.data != undefined, result1.error as string)
testIds.push(result1.data.id) testIds.push(result1.data.id)
// Order2 -> Para el test de crearOrder // Order2 -> Para el test de crearOrder

View File

@@ -2,11 +2,10 @@
* TODO: Usar * TODO: Usar
*/ */
import { PoolClient, QueryResult, QueryResultRow } from "pg"; import { PoolClient, QueryResult, QueryResultRow } from "pg";
import { CreateOrderDTO, FinishOrderDTO, OrderTracking, UpdateOrderDTO } from "../domain/Order.js"; import { CreateOrderDTO, ErrorOrderDTO, FinishOrderDTO, OrderTracking, UpdateOrderDTO } from "../domain/Order.js";
import { Result } from "../domain/Result.js"; import { Result, tryCatch } from "../domain/Result.js";
import { PgClient } from "./PgClient.js"; import { PgClient } from "./PgClient.js";
import assert from "node:assert"; import assert from "node:assert";
import { error } from "node:console";
/** /**
* Agrupa todas las operaciones de *Order*. * Agrupa todas las operaciones de *Order*.
@@ -19,9 +18,8 @@ import { error } from "node:console";
*/ */
export class OrderRepository { export class OrderRepository {
constructor( constructor(
private readonly pgClient: PgClient private readonly pgClient: PgClient,
) { ) {
} }
/** /**
@@ -57,6 +55,8 @@ export class OrderRepository {
} }
} }
/** /**
* El tipo <T> representa el contenido del mensaje de los order * El tipo <T> representa el contenido del mensaje de los order
*/ */
@@ -191,6 +191,8 @@ export class OrderRepository {
const orderId = currentOrderResult.data?.id const orderId = currentOrderResult.data?.id
if (orderId == undefined) { if (orderId == undefined) {
await client.query("ROLLBACK")
client.release()
return { return {
error: "El order a actualizar no existe " + idType + ": " + idValue error: "El order a actualizar no existe " + idType + ": " + idValue
} }
@@ -261,7 +263,6 @@ export class OrderRepository {
return updatedOrder return updatedOrder
} }
public async finishOrder(args: FinishOrderDTO) { public async finishOrder(args: FinishOrderDTO) {
const client = await this.pgClient.connect(); const client = await this.pgClient.connect();
assert((args.id != undefined) != (args.correlation_id != undefined)) assert((args.id != undefined) != (args.correlation_id != undefined))
@@ -281,6 +282,8 @@ export class OrderRepository {
const orderId = currentOrderResult.data?.id const orderId = currentOrderResult.data?.id
if (orderId == undefined) { if (orderId == undefined) {
await client.query("ROLLBACK")
client.release()
return { return {
error: "El order a actualizar no existe " + idType + ": " + idValue error: "El order a actualizar no existe " + idType + ": " + idValue
} }
@@ -353,22 +356,19 @@ export class OrderRepository {
} }
// TODO: tema de poder filtrar por correlation_id // TODO: tema de poder filtrar por correlation_id
public async errorOrder(args: { public async errorOrder(args: ErrorOrderDTO): Promise<Result<string, OrderTracking<any>>> {
id: number,
status: "failed" | "dlx",
reason: string,
error?: string,
stackTrace?: string
}) {
const client = await this.pgClient.connect(); const client = await this.pgClient.connect();
await client.query('BEGIN'); await client.query('BEGIN');
const idType = ('id' in args) ? "id" : "correlation_id"
const idValue = (args.id != undefined) ? args.id : args.correlation_id
// 1. Se consulta la order de base // 1. Se consulta la order de base
const qCurrentOrder = ` const qCurrentOrder = `
SELECT * FROM order_tracking SELECT * FROM order_tracking
WHERE id = $1 WHERE ${idType} = $1
` `
const vCurrentOrder = [args.id] const vCurrentOrder = [idValue]
const currentOrderResult = await this.getFirst(client.query<OrderTracking<any>>(qCurrentOrder, vCurrentOrder)) const currentOrderResult = await this.getFirst(client.query<OrderTracking<any>>(qCurrentOrder, vCurrentOrder))
@@ -378,6 +378,7 @@ export class OrderRepository {
return currentOrderResult return currentOrderResult
} }
const id = currentOrderResult.data.id // Saco el id para evitar busacr por correlation_id que es mas lento
const currentOrder = currentOrderResult.data! const currentOrder = currentOrderResult.data!
// 3. Si todo ok se actualiza el order // 3. Si todo ok se actualiza el order
@@ -395,7 +396,7 @@ export class OrderRepository {
WHERE id = $1 WHERE id = $1
RETURNING id, status, update_date; RETURNING id, status, update_date;
` `
const vOrderTracking = [args.id, args.status, args.error, args.stackTrace] const vOrderTracking = [id, args.status, args.error, args.stackTrace]
const updatedOrderResult = await this.getFirst( const updatedOrderResult = await this.getFirst(
client.query<{ id: number, status: string, update_date: string }>(uOrderTracking, vOrderTracking) client.query<{ id: number, status: string, update_date: string }>(uOrderTracking, vOrderTracking)
) )

View File

@@ -0,0 +1,14 @@
## ENV PARA DATOS DE TEST - shared nunca se lanza en produccion
# claves de Objenious
OBJ_PEM_PATH=./obj.pem
OBJ_AUTHORIZATION=XOc7FtwXD8hUX2SFVX94XSty8wkOmChkwDNF09O_aIxPubMDdFUdCDCB4zpzSIxi8nOcTg7r_LM_nmd5qm7uLbksf_XArjI8iAyhjKz_2BAXPhmvKs4Fc9f3vv5LDfCVrPB9lP8P7rJ66_qnWs4jvhLQxSfn29m96hgXeCf8oySdIDUjN2q9Js3KAS5LL52Ri6ryvUeO1PvMhaPQMWRqoHIqTV1wPfPtiqQwcjUPmu5GeW164Kq1JLgV3KaGzfCZ9Qv9lbv30EJrukXxWuLCAhBS0kzrBXZoWvf2pb9uh3Am_93_dDxiIGQfIap9ZU_m8ZD1HPgvZOMCY6ZkxQconQ
OBJ_CLI_ASSERTION=XOc7FtwXD8hUX2SFVX94XSty8wkOmChkwDNF09O_aIxPubMDdFUdCDCB4zpzSIxi8nOcTg7r_LM_nmd5qm7uLbksf_XArjI8iAyhjKz_2BAXPhmvKs4Fc9f3vv5LDfCVrPB9lP8P7rJ66_qnWs4jvhLQxSfn29m96hgXeCf8oySdIDUjN2q9Js3KAS5LL52Ri6ryvUeO1PvMhaPQMWRqoHIqTV1wPfPtiqQwcjUPmu5GeW164Kq1JLgV3KaGzfCZ9Qv9lbv30EJrukXxWuLCAhBS0kzrBXZoWvf2pb9uh3Am_93_dDxiIGQfIap9ZU_m8ZD1HPgvZOMCY6ZkxQconQ
OBJ_CLIENT_ID=savefamily_rest_ws
OBJ_KID=xNfbMiyL1ORXGP8lElhcv8nVaG3EJKye4Lc1YoN3I1E
OBJ_BASE_URL=https://api-getway.objenious.com/ws
# OBJ_BASE_URL=https://api-getway.objenious.com/ws/test
NOTIFICATION_URL="https://sf-sim-activation.savefamilygps.net/send-activation-mail"
# NOTIFICATION_URL="localhost"
SIM_ACTIVATION_API_KEY=9e48c4ac-1ab0-4397-b3f3-6c239200dfe6

View File

@@ -1,12 +1,3 @@
#/bin/bash #/bin/bash
rm deployment/database/init.sql
# init sql debe juntar todos los scripts de "base" (sin contar migraciones)
cat deployment/database/base/*.sql >deployment/database/init.sql
#cp deployment/database/esquema_final* deployment/database/init.sql
# compatibilidad con postgresql < 17
sed -i '/\\restrict/d' deployment/database/init.sql
sed -i '/\\unrestrict/d' deployment/database/init.sql
docker compose -f deployment/local/docker/docker-compose.yaml --project-directory ./ up --watch docker compose -f deployment/local/docker/docker-compose.yaml --project-directory ./ up --watch

182
yarn.lock
View File

@@ -187,6 +187,15 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"@gar/promise-retry@npm:^1.0.0":
version: 1.0.2
resolution: "@gar/promise-retry@npm:1.0.2"
dependencies:
retry: "npm:^0.13.1"
checksum: 10/b91326999ce94677cbe91973079eabc689761a93a045f6a2d34d4070e9305b27f6c54e4021688c7080cb14caf89eafa0c0f300af741b94c20d18608bdb66ca46
languageName: node
linkType: hard
"@isaacs/fs-minipass@npm:^4.0.0": "@isaacs/fs-minipass@npm:^4.0.0":
version: 4.0.1 version: 4.0.1
resolution: "@isaacs/fs-minipass@npm:4.0.1" resolution: "@isaacs/fs-minipass@npm:4.0.1"
@@ -443,6 +452,18 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"@sf-alvar/db-migrate@npm:1.0.6":
version: 1.0.6
resolution: "@sf-alvar/db-migrate@npm:1.0.6::__archiveUrl=https%3A%2F%2Fgit.savefamilygps.net%2Fapi%2Fpackages%2FSaveFamily%2Fnpm%2F%2540sf-alvar%252Fdb-migrate%2F-%2F1.0.6%2Fdb-migrate-1.0.6.tgz"
dependencies:
pg: "npm:^8.18.0"
yargs: "npm:^18.0.0"
bin:
db-migrate: lib/index.js
checksum: 10/070f1388ff1c6fd2d24c3139d779e871bc0db94f11dd2013aa7eb5728e3c21e594bac0f4d46f8f3132391a9903cca56d5c864862c622d70f24e0db0ffcbbbf0e
languageName: node
linkType: hard
"@standard-schema/spec@npm:^1.0.0": "@standard-schema/spec@npm:^1.0.0":
version: 1.1.0 version: 1.1.0
resolution: "@standard-schema/spec@npm:1.1.0" resolution: "@standard-schema/spec@npm:1.1.0"
@@ -563,22 +584,22 @@ __metadata:
linkType: hard linkType: hard
"@types/node@npm:*, @types/node@npm:^25.0.3": "@types/node@npm:*, @types/node@npm:^25.0.3":
version: 25.3.0 version: 25.3.3
resolution: "@types/node@npm:25.3.0" resolution: "@types/node@npm:25.3.3"
dependencies: dependencies:
undici-types: "npm:~7.18.0" undici-types: "npm:~7.18.0"
checksum: 10/061b00c8de070a606a052afaa4c45dca5f8d6a8e7e39c0c3e196bb650ee37e986bbb161991ea39076a05aada102f36b13c974528448a09efd8d36bdfee75de4b checksum: 10/883e8942b0ddf89f9aae56c4205af8d9a368acd6cab83aa052447a6c5e69ce2bc8ab3f54e549233ada160ba9216dad7f30c62c35867c584fe844ae99f7dea2e0
languageName: node languageName: node
linkType: hard linkType: hard
"@types/pg@npm:^8.16.0": "@types/pg@npm:^8.16.0":
version: 8.16.0 version: 8.18.0
resolution: "@types/pg@npm:8.16.0" resolution: "@types/pg@npm:8.18.0"
dependencies: dependencies:
"@types/node": "npm:*" "@types/node": "npm:*"
pg-protocol: "npm:*" pg-protocol: "npm:*"
pg-types: "npm:^2.2.0" pg-types: "npm:^2.2.0"
checksum: 10/c03346fbe87728a237f30a3d0a436b86ede88e1dc471782bf679a4d74d67ee2a96f953e7c04d73841d21b9db43a5bf2ccdf2cd4c75450ea57efd947049809b3a checksum: 10/fdfcaff97f0bd067bf4c4750592bd627a772c5ac4d4164332efe121f9fc2112479dcf913bafd91fe8e86581d5994897e5fd5b4faaf734a42719540037d3b64e7
languageName: node languageName: node
linkType: hard linkType: hard
@@ -627,7 +648,17 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"@types/supertest@npm:*, @types/supertest@npm:^6.0.3": "@types/supertest@npm:*":
version: 7.2.0
resolution: "@types/supertest@npm:7.2.0"
dependencies:
"@types/methods": "npm:^1.1.4"
"@types/superagent": "npm:^8.1.0"
checksum: 10/5a322e29b81033e90ac50ab315d49559b21809ee39b5681ab7386819463e30d68e29c63c946023a1c353e7f13fb3f20d64dcb89d3d8a0fff569450501aff786c
languageName: node
linkType: hard
"@types/supertest@npm:^6.0.3":
version: 6.0.3 version: 6.0.3
resolution: "@types/supertest@npm:6.0.3" resolution: "@types/supertest@npm:6.0.3"
dependencies: dependencies:
@@ -769,7 +800,7 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"ansi-regex@npm:^6.0.1": "ansi-regex@npm:^6.2.2":
version: 6.2.2 version: 6.2.2
resolution: "ansi-regex@npm:6.2.2" resolution: "ansi-regex@npm:6.2.2"
checksum: 10/9b17ce2c6daecc75bcd5966b9ad672c23b184dc3ed9bf3c98a0702f0d2f736c15c10d461913568f2cf527a5e64291c7473358885dd493305c84a1cfed66ba94f checksum: 10/9b17ce2c6daecc75bcd5966b9ad672c23b184dc3ed9bf3c98a0702f0d2f736c15c10d461913568f2cf527a5e64291c7473358885dd493305c84a1cfed66ba94f
@@ -845,13 +876,13 @@ __metadata:
linkType: hard linkType: hard
"axios@npm:^1.13.3": "axios@npm:^1.13.3":
version: 1.13.5 version: 1.13.6
resolution: "axios@npm:1.13.5" resolution: "axios@npm:1.13.6"
dependencies: dependencies:
follow-redirects: "npm:^1.15.11" follow-redirects: "npm:^1.15.11"
form-data: "npm:^4.0.5" form-data: "npm:^4.0.5"
proxy-from-env: "npm:^1.1.0" proxy-from-env: "npm:^1.1.0"
checksum: 10/db726d09902565ef9a0632893530028310e2ec2b95b727114eca1b101450b00014133dfc3871cffc87983fb922bca7e4874d7e2826d1550a377a157cdf3f05b6 checksum: 10/a7ed83c2af3ef21d64609df0f85e76893a915a864c5934df69241001d0578082d6521a0c730bf37518ee458821b5695957cb10db9fc705f2a8996c8686ea7a89
languageName: node languageName: node
linkType: hard linkType: hard
@@ -887,11 +918,11 @@ __metadata:
linkType: hard linkType: hard
"brace-expansion@npm:^5.0.2": "brace-expansion@npm:^5.0.2":
version: 5.0.3 version: 5.0.4
resolution: "brace-expansion@npm:5.0.3" resolution: "brace-expansion@npm:5.0.4"
dependencies: dependencies:
balanced-match: "npm:^4.0.2" balanced-match: "npm:^4.0.2"
checksum: 10/8ba7deae4ca333d52418d2cde3287ac23f44f7330d92c3ecd96a8941597bea8aab02227bd990944d6711dd549bcc6e550fe70be5d94aa02e2fdc88942f480c9b checksum: 10/cfd57e20d8ded9578149e47ae4d3fff2b2f78d06b54a32a73057bddff65c8e9b930613f0cbcfefedf12dd117151e19d4da16367d5127c54f3bff02d8a4479bb2
languageName: node languageName: node
linkType: hard linkType: hard
@@ -1123,18 +1154,6 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"db-migrate@https://git.savefamilygps.net/alvarsanmartin/herramienta-migracion.git":
version: 1.1.0
resolution: "db-migrate@https://git.savefamilygps.net/alvarsanmartin/herramienta-migracion.git#commit=f84d68ba79161b9b06b747919979db00aac34b49"
dependencies:
pg: "npm:^8.18.0"
yargs: "npm:^18.0.0"
bin:
db-migrate: ./lib/index.js
checksum: 10/2468cfd14a5f218845f5437f530a68993a51b3998cdd9d0c7f28cdb810314200c471debac9ca19c34d4978907b7c4ced5c95e777eebd40c3baa795ad945d8892
languageName: node
linkType: hard
"debug@npm:4, debug@npm:^4.1.1, debug@npm:^4.3.4, debug@npm:^4.3.7, debug@npm:^4.4.0, debug@npm:^4.4.3": "debug@npm:4, debug@npm:^4.1.1, debug@npm:^4.3.4, debug@npm:^4.3.7, debug@npm:^4.4.0, debug@npm:^4.4.3":
version: 4.4.3 version: 4.4.3
resolution: "debug@npm:4.4.3" resolution: "debug@npm:4.4.3"
@@ -1226,15 +1245,6 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"encoding@npm:^0.1.13":
version: 0.1.13
resolution: "encoding@npm:0.1.13"
dependencies:
iconv-lite: "npm:^0.6.2"
checksum: 10/bb98632f8ffa823996e508ce6a58ffcf5856330fde839ae42c9e1f436cc3b5cc651d4aeae72222916545428e54fd0f6aa8862fd8d25bdbcc4589f1e3f3715e7f
languageName: node
linkType: hard
"env-paths@npm:^2.2.0": "env-paths@npm:^2.2.0":
version: 2.2.1 version: 2.2.1
resolution: "env-paths@npm:2.2.1" resolution: "env-paths@npm:2.2.1"
@@ -1242,13 +1252,6 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"err-code@npm:^2.0.2":
version: 2.0.3
resolution: "err-code@npm:2.0.3"
checksum: 10/1d20d825cdcce8d811bfbe86340f4755c02655a7feb2f13f8c880566d9d72a3f6c92c192a6867632e490d6da67b678271f46e01044996a6443e870331100dfdd
languageName: node
linkType: hard
"es-define-property@npm:^1.0.1": "es-define-property@npm:^1.0.1":
version: 1.0.1 version: 1.0.1
resolution: "es-define-property@npm:1.0.1" resolution: "es-define-property@npm:1.0.1"
@@ -1795,16 +1798,7 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"iconv-lite@npm:^0.6.2": "iconv-lite@npm:^0.7.0, iconv-lite@npm:^0.7.2, iconv-lite@npm:~0.7.0":
version: 0.6.3
resolution: "iconv-lite@npm:0.6.3"
dependencies:
safer-buffer: "npm:>= 2.1.2 < 3.0.0"
checksum: 10/24e3292dd3dadaa81d065c6f8c41b274a47098150d444b96e5f53b4638a9a71482921ea6a91a1f59bb71d9796de25e04afd05919fa64c360347ba65d3766f10f
languageName: node
linkType: hard
"iconv-lite@npm:^0.7.0, iconv-lite@npm:~0.7.0":
version: 0.7.2 version: 0.7.2
resolution: "iconv-lite@npm:0.7.2" resolution: "iconv-lite@npm:0.7.2"
dependencies: dependencies:
@@ -1918,9 +1912,10 @@ __metadata:
linkType: hard linkType: hard
"make-fetch-happen@npm:^15.0.0": "make-fetch-happen@npm:^15.0.0":
version: 15.0.3 version: 15.0.4
resolution: "make-fetch-happen@npm:15.0.3" resolution: "make-fetch-happen@npm:15.0.4"
dependencies: dependencies:
"@gar/promise-retry": "npm:^1.0.0"
"@npmcli/agent": "npm:^4.0.0" "@npmcli/agent": "npm:^4.0.0"
cacache: "npm:^20.0.1" cacache: "npm:^20.0.1"
http-cache-semantics: "npm:^4.1.1" http-cache-semantics: "npm:^4.1.1"
@@ -1930,9 +1925,8 @@ __metadata:
minipass-pipeline: "npm:^1.2.4" minipass-pipeline: "npm:^1.2.4"
negotiator: "npm:^1.0.0" negotiator: "npm:^1.0.0"
proc-log: "npm:^6.0.0" proc-log: "npm:^6.0.0"
promise-retry: "npm:^2.0.1"
ssri: "npm:^13.0.0" ssri: "npm:^13.0.0"
checksum: 10/78da4fc1df83cb596e2bae25aa0653b8a9c6cbdd6674a104894e03be3acfcd08c70b78f06ef6407fbd6b173f6a60672480d78641e693d05eb71c09c13ee35278 checksum: 10/4aa75baab500eff4259f2e1a3e76cf01ab3a3cd750037e4bd7b5e22bc5a60f12cc766b3c45e6288accb5ab609e88de5019a8014e0f96f6594b7b03cb504f4b81
languageName: node languageName: node
linkType: hard linkType: hard
@@ -2023,11 +2017,11 @@ __metadata:
linkType: hard linkType: hard
"minimatch@npm:^10.2.2": "minimatch@npm:^10.2.2":
version: 10.2.2 version: 10.2.4
resolution: "minimatch@npm:10.2.2" resolution: "minimatch@npm:10.2.4"
dependencies: dependencies:
brace-expansion: "npm:^5.0.2" brace-expansion: "npm:^5.0.2"
checksum: 10/e135be7b502ac97c02bcee42ccc1c55dc26dbac036c0f4acde69e42fe339d7fb53fae711e57b3546cb533426382ea492c73a073c7f78832e0453d120d48dd015 checksum: 10/aea4874e521c55bb60744685bbffe3d152e5460f84efac3ea936e6bbe2ceba7deb93345fec3f9bb17f7b6946776073a64d40ae32bf5f298ad690308121068a1f
languageName: node languageName: node
linkType: hard linkType: hard
@@ -2041,17 +2035,17 @@ __metadata:
linkType: hard linkType: hard
"minipass-fetch@npm:^5.0.0": "minipass-fetch@npm:^5.0.0":
version: 5.0.1 version: 5.0.2
resolution: "minipass-fetch@npm:5.0.1" resolution: "minipass-fetch@npm:5.0.2"
dependencies: dependencies:
encoding: "npm:^0.1.13" iconv-lite: "npm:^0.7.2"
minipass: "npm:^7.0.3" minipass: "npm:^7.0.3"
minipass-sized: "npm:^2.0.0" minipass-sized: "npm:^2.0.0"
minizlib: "npm:^3.0.1" minizlib: "npm:^3.0.1"
dependenciesMeta: dependenciesMeta:
encoding: iconv-lite:
optional: true optional: true
checksum: 10/08bf0c9866e7f344bf1863ce0d99c0a6fe96b43ef5a4119e23d84a21e613a3f55ecf302adf28d9e228b4ebd50e81d5e84c397e0535089090427319379f478d94 checksum: 10/4f3f65ea5b20a3a287765ebf21cc73e62031f754944272df2a3039296cc75a8fc2dc50b8a3c4f39ce3ac6e5cc583e8dc664d12c6ab98e0883d263e49f344bc86
languageName: node languageName: node
linkType: hard linkType: hard
@@ -2287,19 +2281,19 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"pg-pool@npm:^3.11.0": "pg-pool@npm:^3.12.0":
version: 3.11.0 version: 3.12.0
resolution: "pg-pool@npm:3.11.0" resolution: "pg-pool@npm:3.12.0"
peerDependencies: peerDependencies:
pg: ">=8.0" pg: ">=8.0"
checksum: 10/51c77d99f17cf791333467352df8326e0f70f9c517eada65a5e7819b2422f6e655e52319f5406eb578504442ae5f399b6e1d023e41d0c199aaf82879a890db6d checksum: 10/81a4220b89ba28034c51db0a7e231a8c1555ddb3cf8bacde0acd092fb26473763a335629ffcf5153059dd8f406d39610e384fd9176d34359a3d1498b4c5b95cd
languageName: node languageName: node
linkType: hard linkType: hard
"pg-protocol@npm:*, pg-protocol@npm:^1.11.0": "pg-protocol@npm:*, pg-protocol@npm:^1.12.0":
version: 1.11.0 version: 1.12.0
resolution: "pg-protocol@npm:1.11.0" resolution: "pg-protocol@npm:1.12.0"
checksum: 10/a70b1b4a3fc5b1be80dfdd65c829a149b8bd9df7488f9c47e0b51c9413aec5eb6da0a9ae9812891d74cd9f2ee90c0e391984a41b64603e7375fcbb9e07070b08 checksum: 10/0f5d8a5dbef39ef4d06686910ad61599b8d26c4505e76af2f6da3a1a1028c312f61678fae5e5012d477fe318b5ebc8507a828c087973b22e5fd4ec1e7394101a
languageName: node languageName: node
linkType: hard linkType: hard
@@ -2317,13 +2311,13 @@ __metadata:
linkType: hard linkType: hard
"pg@npm:^8.18.0": "pg@npm:^8.18.0":
version: 8.18.0 version: 8.19.0
resolution: "pg@npm:8.18.0" resolution: "pg@npm:8.19.0"
dependencies: dependencies:
pg-cloudflare: "npm:^1.3.0" pg-cloudflare: "npm:^1.3.0"
pg-connection-string: "npm:^2.11.0" pg-connection-string: "npm:^2.11.0"
pg-pool: "npm:^3.11.0" pg-pool: "npm:^3.12.0"
pg-protocol: "npm:^1.11.0" pg-protocol: "npm:^1.12.0"
pg-types: "npm:2.2.0" pg-types: "npm:2.2.0"
pgpass: "npm:1.0.5" pgpass: "npm:1.0.5"
peerDependencies: peerDependencies:
@@ -2334,7 +2328,7 @@ __metadata:
peerDependenciesMeta: peerDependenciesMeta:
pg-native: pg-native:
optional: true optional: true
checksum: 10/91c622f179f60df08ab7aa9b05a890567ea47f2d7984377b64e88e1eba1c42787324b7fc5ff00e109a757f3329dc4b57c73502603ae2765d1827b2082abbdcfa checksum: 10/0d552512b6c65c20b4054a203632f8ad51f6c5e60b8aaf65f5dc9f07a698da1e8974ca3918964999ea783c370bda9d230e662d2bab333b3968a28086923934e0
languageName: node languageName: node
linkType: hard linkType: hard
@@ -2378,13 +2372,13 @@ __metadata:
linkType: hard linkType: hard
"postcss@npm:^8.5.6": "postcss@npm:^8.5.6":
version: 8.5.6 version: 8.5.8
resolution: "postcss@npm:8.5.6" resolution: "postcss@npm:8.5.8"
dependencies: dependencies:
nanoid: "npm:^3.3.11" nanoid: "npm:^3.3.11"
picocolors: "npm:^1.1.1" picocolors: "npm:^1.1.1"
source-map-js: "npm:^1.2.1" source-map-js: "npm:^1.2.1"
checksum: 10/9e4fbe97574091e9736d0e82a591e29aa100a0bf60276a926308f8c57249698935f35c5d2f4e80de778d0cbb8dcffab4f383d85fd50c5649aca421c3df729b86 checksum: 10/cbacbfd7f767e2c820d4bf09a3a744834dd7d14f69ff08d1f57b1a7defce9ae5efcf31981890d9697a972a64e9965de677932ef28e4c8ba23a87aad45b82c459
languageName: node languageName: node
linkType: hard linkType: hard
@@ -2441,16 +2435,6 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"promise-retry@npm:^2.0.1":
version: 2.0.1
resolution: "promise-retry@npm:2.0.1"
dependencies:
err-code: "npm:^2.0.2"
retry: "npm:^0.12.0"
checksum: 10/96e1a82453c6c96eef53a37a1d6134c9f2482f94068f98a59145d0986ca4e497bf110a410adf73857e588165eab3899f0ebcf7b3890c1b3ce802abc0d65967d4
languageName: node
linkType: hard
"proxy-addr@npm:^2.0.7": "proxy-addr@npm:^2.0.7":
version: 2.0.7 version: 2.0.7
resolution: "proxy-addr@npm:2.0.7" resolution: "proxy-addr@npm:2.0.7"
@@ -2547,10 +2531,10 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"retry@npm:^0.12.0": "retry@npm:^0.13.1":
version: 0.12.0 version: 0.13.1
resolution: "retry@npm:0.12.0" resolution: "retry@npm:0.13.1"
checksum: 10/1f914879f97e7ee931ad05fe3afa629bd55270fc6cf1c1e589b6a99fab96d15daad0fa1a52a00c729ec0078045fe3e399bd4fd0c93bcc906957bdc17f89cb8e6 checksum: 10/6125ec2e06d6e47e9201539c887defba4e47f63471db304c59e4b82fc63c8e89ca06a77e9d34939a9a42a76f00774b2f46c0d4a4cbb3e287268bd018ed69426d
languageName: node languageName: node
linkType: hard linkType: hard
@@ -2872,6 +2856,7 @@ __metadata:
version: 0.0.0-use.local version: 0.0.0-use.local
resolution: "sim-eventos@workspace:." resolution: "sim-eventos@workspace:."
dependencies: dependencies:
"@sf-alvar/db-migrate": "npm:1.0.6"
"@tsconfig/node22": "npm:^22.0.5" "@tsconfig/node22": "npm:^22.0.5"
"@types/amqplib": "npm:^0.10.8" "@types/amqplib": "npm:^0.10.8"
"@types/cors": "npm:^2.8.19" "@types/cors": "npm:^2.8.19"
@@ -2884,7 +2869,6 @@ __metadata:
axios: "npm:^1.13.3" axios: "npm:^1.13.3"
concurrently: "npm:^9.2.1" concurrently: "npm:^9.2.1"
cors: "npm:^2.8.5" cors: "npm:^2.8.5"
db-migrate: "https://git.savefamilygps.net/alvarsanmartin/herramienta-migracion.git"
dotenv: "npm:^17.2.3" dotenv: "npm:^17.2.3"
express: "npm:^5.2.1" express: "npm:^5.2.1"
pg: "npm:^8.18.0" pg: "npm:^8.18.0"
@@ -3079,11 +3063,11 @@ __metadata:
linkType: hard linkType: hard
"strip-ansi@npm:^7.1.0": "strip-ansi@npm:^7.1.0":
version: 7.1.2 version: 7.2.0
resolution: "strip-ansi@npm:7.1.2" resolution: "strip-ansi@npm:7.2.0"
dependencies: dependencies:
ansi-regex: "npm:^6.0.1" ansi-regex: "npm:^6.2.2"
checksum: 10/db0e3f9654e519c8a33c50fc9304d07df5649388e7da06d3aabf66d29e5ad65d5e6315d8519d409c15b32fa82c1df7e11ed6f8cd50b0e4404463f0c9d77c8d0b checksum: 10/96da3bc6d73cfba1218625a3d66cf7d37a69bf0920d8735b28f9eeaafcdb6c1fe8440e1ae9eb1ba0ca355dbe8702da872e105e2e939fa93e7851b3cb5dd7d316
languageName: node languageName: node
linkType: hard linkType: hard