19 Commits
1.2.0 ... 1.3.0

Author SHA1 Message Date
9d63d23754 Mejor gestion de errores para los order 2026-03-26 12:21:28 +01:00
a95655a2a6 Completada la tarea de volcado 2026-03-26 09:29:09 +01:00
025801a689 Repositorio de lineas funciona 2026-03-25 11:51:14 +01:00
28880c4d99 Lineas activas e insertar cada una 2026-03-24 17:27:52 +01:00
5bb3bc554b doc 2026-03-24 11:20:59 +01:00
cfb907b840 Copy en jenkins 2026-03-11 12:34:41 +01:00
d5d7953fd2 Endpoint para documentacion 2026-03-11 12:31:17 +01:00
96298aab25 Docs en HTML 2026-03-11 11:35:16 +01:00
c17cca1e81 Sobreescribia el register 2026-03-10 10:43:56 +01:00
7264efcf79 Errata 2026-03-10 10:42:32 +01:00
8934bcd603 Copia yarnrc 2026-03-10 10:39:39 +01:00
bdd08dbc56 Copiar yarnrc a docker 2026-03-10 10:37:26 +01:00
7d47fde806 Solucionado problema db-migrate 2026-03-10 10:21:53 +01:00
ad207fb732 db-migrate 2026-03-10 09:34:17 +01:00
bd9081b5bc hardcodeado el customerAccountCode 2026-03-06 11:18:30 +01:00
a429e9d14a Errata customer 2026-03-06 11:13:47 +01:00
81eb986313 Error de tipado 2026-03-06 11:06:15 +01:00
58bedc42f1 Bug de correlation_id en las llamadas a objenious 2026-03-06 11:02:18 +01:00
b97f422261 Prod 2026-03-05 10:33:38 +01:00
31 changed files with 816 additions and 92 deletions

View File

@@ -4,4 +4,8 @@ enableGlobalCache: false
nodeLinker: node-modules nodeLinker: node-modules
npmRegistryServer: "https://git.savefamilygps.net/api/packages/alvarsanmartin/npm/" npmScopes:
sf-alvar:
npmRegistryServer: "https://git.savefamilygps.net/api/packages/SaveFamily/npm/"
npmRegistryServer: "https://registry.npmjs.org/"

View File

@@ -1,10 +1,3 @@
#/bin/bash #/bin/bash
rm deployment/database/init.sql
# cat deployment/database/*.sql >deployment/database/init.sql
cp deployment/database/esquema_final* deployment/database/init.sql
# compatibilidad con postgresql < 17
sed -i '/\\restrict/d' deployment/database/init.sql
sed -i '/\\unrestrict/d' deployment/database/init.sql
docker compose -f deployment/local/docker/docker-compose.yaml --project-directory ./ build docker compose -f deployment/local/docker/docker-compose.yaml --project-directory ./ build

View File

@@ -0,0 +1,20 @@
-- Snapshot of Objenious SIM lines dumped into the intranet database by the
-- periodic "volcado" task.
-- NOTE: unquoted camelCase identifiers are folded to lowercase by PostgreSQL;
-- the TypeScript repository also uses unquoted names, so they keep matching.
CREATE TABLE IF NOT EXISTS objenious_lines (
    id                      INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    simId                   BIGINT UNIQUE,  -- conflict target for the upsert
    status                  TEXT,
    iccid                   TEXT NOT NULL,
    msisdn                  TEXT,
    imei                    TEXT,
    imeiChangeDate          TIMESTAMPTZ,
    offerCode               TEXT,
    preactivationDate       TIMESTAMPTZ,    -- API sends a date only, no time part
    activationDate          TIMESTAMPTZ,
    commercialStatus        TEXT,
    commercialStatusDate    TIMESTAMPTZ,
    billingStatus           TEXT,
    billingStatusChangeDate TIMESTAMPTZ,
    billingActivationDate   TIMESTAMPTZ,
    createDate              TIMESTAMPTZ,
    raw                     JSONB,          -- full API payload as received
    hash                    TEXT            -- change-detection digest of the row
);

View File

@@ -6,12 +6,11 @@ WORKDIR /home/node/app
RUN corepack enable RUN corepack enable
COPY ./dist/packages ./packages COPY ./dist/packages ./packages
COPY ./.yarnrc.yml ./
COPY ./docs ./docs
COPY ./package.json ./ COPY ./package.json ./
# Force node-modules linker (no .yarnrc.yml in build context)
RUN echo 'nodeLinker: node-modules' > .yarnrc.yml
RUN yarn install RUN yarn install
RUN mkdir -p dist && ln -sf ../packages dist/packages RUN mkdir -p dist && ln -sf ../packages dist/packages

View File

@@ -60,6 +60,11 @@ pipeline {
sourceFiles: "dist/**/*", sourceFiles: "dist/**/*",
excludes: "dist/**/node_modules/**" excludes: "dist/**/node_modules/**"
), ),
sshTransfer(
cleanRemote: false,
remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: "docs/**/*",
),
sshTransfer( sshTransfer(
cleanRemote: false, cleanRemote: false,
remoteDirectory: "$APP_REMOTE_PATH", remoteDirectory: "$APP_REMOTE_PATH",
@@ -88,6 +93,11 @@ pipeline {
remoteDirectory: "$APP_REMOTE_PATH", remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: "package.json", sourceFiles: "package.json",
), ),
sshTransfer(
cleanRemote: false,
remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: ".yarnrc.yml",
),
sshTransfer( sshTransfer(
cleanRemote: false, cleanRemote: false,
execCommand: "sh $APP_REMOTE_PATH/rebuild.sh" execCommand: "sh $APP_REMOTE_PATH/rebuild.sh"

View File

@@ -14,6 +14,7 @@ COPY ./packages ./packages
COPY tsconfig*.json ./ COPY tsconfig*.json ./
COPY .env* ./ COPY .env* ./
COPY ./.yarnrc.yml ./ COPY ./.yarnrc.yml ./
COPY ./docs ./docs
COPY ./deployment/local/docker/start.sh ./ COPY ./deployment/local/docker/start.sh ./
# Copiar el archivo de migrations? porque ahora no creo que se esté lanzando nada # Copiar el archivo de migrations? porque ahora no creo que se esté lanzando nada
COPY ./deployment/database/migrations ./deployment/database/migrations COPY ./deployment/database/migrations ./deployment/database/migrations

View File

@@ -40,6 +40,9 @@ services:
- path: ./packages - path: ./packages
action: sync action: sync
target: /usr/local/app/packages target: /usr/local/app/packages
- path: ./docs
action: sync
target: /usr/local/app/docs
- path: ./package.json - path: ./package.json
action: rebuild action: rebuild
ports: ports:
@@ -72,7 +75,6 @@ services:
- "${POSTGRES_PORT}:${POSTGRES_PORT}" - "${POSTGRES_PORT}:${POSTGRES_PORT}"
volumes: volumes:
- ./sql-data/:/var/lib/postgres/data - ./sql-data/:/var/lib/postgres/data
- ./deployment/database/init.sql:/docker-entrypoint-initdb.d/init.sql
healthcheck: healthcheck:
test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"] test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"]
interval: 5s interval: 5s

File diff suppressed because one or more lines are too long

16
docs/sim-api/Docs.bru Normal file
View File

@@ -0,0 +1,16 @@
meta {
name: Docs
type: http
seq: 12
}
get {
url: {{baseurl}}/docs/sim-api-documentation.html
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -5,16 +5,16 @@ meta {
} }
get { get {
url: https://api-getway.objenious.com/ws/lines?pageSize=10&identifier.identifierType=ICCID&identifier.identifiers=8933201125065160455 url: https://api-getway.objenious.com/ws/lines?pageSize=1000&simStatus=ACTIVATED
body: formUrlEncoded body: formUrlEncoded
auth: bearer auth: bearer
} }
params:query { params:query {
pageSize: 10 pageSize: 1000
identifier.identifierType: ICCID simStatus: ACTIVATED
identifier.identifiers: 8933201125065160455 ~identifier.identifierType: ICCID
~simStatus: ACTIVATED ~identifier.identifiers: 8933201125065160455
} }
auth:bearer { auth:bearer {

View File

@@ -37,7 +37,7 @@ body:form-urlencoded {
} }
vars:pre-request { vars:pre-request {
params.id: 14557 params.id: 15102
} }
settings { settings {

View File

@@ -19,6 +19,7 @@
"migrate": "yarn db-migrate -e .env -m deployment/database/migrations -t 99.0.0" "migrate": "yarn db-migrate -e .env -m deployment/database/migrations -t 99.0.0"
}, },
"dependencies": { "dependencies": {
"@sf-alvar/db-migrate": "1.0.3",
"@tsconfig/node22": "^22.0.5", "@tsconfig/node22": "^22.0.5",
"amqp-connection-manager": "^5.0.0", "amqp-connection-manager": "^5.0.0",
"amqplib": "^0.10.9", "amqplib": "^0.10.9",

View File

@@ -86,7 +86,7 @@ export class SimController {
const resp = await this.tryUseCase(msg, this.useCases.activate({ const resp = await this.tryUseCase(msg, this.useCases.activate({
correlation_id: msgData.headers?.message_id, correlation_id: msgData.headers?.message_id,
dueDate: this.genDueDate(DUE_DATE_SECONDS).toISOString(), dueDate: this.genDueDate(DUE_DATE_SECONDS).toISOString(),
customerAccountCode: env.OBJ_CUSTOMER_CODE, customerAccountCode: "9.49411.10",
identifier: { identifier: {
identifierType: "ICCID", identifierType: "ICCID",
identifiers: [iccid] identifiers: [iccid]

View File

@@ -110,7 +110,10 @@ export class SimUseCases {
const OPERATION_URL = "/actions/activateLine" const OPERATION_URL = "/actions/activateLine"
return async () => { return async () => {
const req = this.httpClient.client.post(OPERATION_URL, { const req = this.httpClient.client.post(OPERATION_URL, {
...activationData dueDate: activationData.dueDate,
identifier: activationData.identifier,
customerAccountCode: activationData.customerAccountCode,
offer: activationData.offer
}) })
try { try {
@@ -225,7 +228,10 @@ export class SimUseCases {
const OPERATION_URL = "/actions/suspendLine" const OPERATION_URL = "/actions/suspendLine"
return this.generateUseCase({ return this.generateUseCase({
correlation_id: suspendData.correlation_id, correlation_id: suspendData.correlation_id,
operationPayload: suspendData, operationPayload: {
dueDate: suspendData.dueDate,
identifier: suspendData.identifier
},
url: OPERATION_URL, url: OPERATION_URL,
iccid: suspendData.identifier.identifiers[0], // iccid: suspendData.identifier.identifiers[0], //
operation: "suspend" operation: "suspend"
@@ -236,7 +242,10 @@ export class SimUseCases {
const OPERATION_URL = "/actions/terminateLine" const OPERATION_URL = "/actions/terminateLine"
return this.generateUseCase({ return this.generateUseCase({
correlation_id: terminationData.correlation_id, correlation_id: terminationData.correlation_id,
operationPayload: terminationData, operationPayload: {
dueDate: terminationData.dueDate,
identifier: terminationData.identifier
},
url: OPERATION_URL, url: OPERATION_URL,
iccid: terminationData.identifier.identifiers[0], // iccid: terminationData.identifier.identifiers[0], //
operation: "terminate" operation: "terminate"

View File

@@ -79,7 +79,7 @@ export class SimController {
...usecaseResult.error ...usecaseResult.error
} }
}).send() }).send()
args.onError(body, usecaseResult.error?.msg?.message ?? "Error indefinido") args.onError(body, usecaseResult.error ?? "Error indefinido")
return 1; return 1;
} }

View File

@@ -198,7 +198,7 @@ export class SimUsecases {
* alias de bloquear / suspender en objenious * alias de bloquear / suspender en objenious
*/ */
async pause(args: { iccid: string, compañia: string }): async pause(args: { iccid: string, compañia: string }):
Promise<Result<string, { iccid: string, message_id: string, operation: "cancelation" }>> { Promise<Result<string, { iccid: string, message_id: string, operation: "pause" }>> {
const pauseEvent = <SimEvents.pause>{ const pauseEvent = <SimEvents.pause>{
key: `sim.${args.compañia}.pause`, key: `sim.${args.compañia}.pause`,
payload: { payload: {
@@ -222,7 +222,7 @@ export class SimUsecases {
data: { data: {
iccid: args.iccid, iccid: args.iccid,
message_id: savedOrder.data.correlation_id, message_id: savedOrder.data.correlation_id,
operation: "cancelation" operation: "pause"
} }
} }
} }

View File

@@ -1,5 +1,6 @@
import express from "express" import express from "express"
import cors from 'cors'; import cors from 'cors';
import path from 'path';
import { simRoutes } from "./infrastructure/simRoutes.http.js" import { simRoutes } from "./infrastructure/simRoutes.http.js"
import { rabbitmqEventBus } from '#config/eventBusConfig.js'; import { rabbitmqEventBus } from '#config/eventBusConfig.js';
import { env } from "#config/env/index.js" import { env } from "#config/env/index.js"
@@ -27,6 +28,8 @@ app.use(express.urlencoded({ extended: true }));
app.use("/sim", simRoutes) app.use("/sim", simRoutes)
app.use("/orders", orderRoutes) app.use("/orders", orderRoutes)
app.use("/docs", express.static(path.join(process.cwd(), '../../docs')))
app.get("/health", (req, res) => { app.get("/health", (req, res) => {
res.status(200).json({ status: "ok" }) res.status(200).json({ status: "ok" })
}) })

View File

@@ -0,0 +1,20 @@
/**
 * Postgres client pointing at the intranet database. It exists only because
 * the line-dump task needs it; if you start using it in more places,
 * something is probably wrong with the design.
 */
import { Pool } from 'pg';
import { PgClient } from 'sim-shared/infrastructure/PgClient.js'
import { env } from './env/index.js';

// Pool reusing the service's Postgres credentials and host, but with the
// database name intentionally hard-coded to "intranet" (see header note).
export const pgPoolIntranet = new Pool({
    user: env.POSTGRES_USER,
    host: env.POSTGRES_HOST,
    database: "intranet",
    password: env.POSTGRES_PASSWORD,
    port: Number(env.POSTGRES_PORT) || 5432, // falls back to 5432 when unset or non-numeric
});

// Same wrapper abstraction the repositories use for the main database client.
export const postgresClientIntranet = new PgClient({
    pool: pgPoolIntranet
})

View File

@@ -5,6 +5,9 @@ import { httpInstance } from "./config/httpClient.config.js"
import { CheckObjeniousRequests } from "./tasks/check_objenious_request.js" import { CheckObjeniousRequests } from "./tasks/check_objenious_request.js"
import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js" import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js"
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js" import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"
import { TaskVolcadoLineas } from "./tasks/volcado_lineas.js"
import { ObjeniousLinesRepository } from "./infranstructure/ObjeniousLinesRepository.js"
import { postgresClientIntranet } from "./config/intranetPostgresConfig.js"
async function startCron() { async function startCron() {
const commonSettings = { const commonSettings = {
@@ -14,10 +17,13 @@ async function startCron() {
const httpClient = httpInstance const httpClient = httpInstance
const pgClient = new PgClient({ pool: pgPool }) const pgClient = new PgClient({ pool: pgPool })
console.log("[i] Comprobando conexion con la BDD ")
await pgClient.checkDatabaseConnection() await pgClient.checkDatabaseConnection()
await pgClient.checkDatabaseConnection()
const operationRepository = new ObjeniousOperationsRepository(pgClient) const operationRepository = new ObjeniousOperationsRepository(pgClient)
const orderRepository = new OrderRepository(pgClient) const orderRepository = new OrderRepository(pgClient)
const objeniousLineRepository = new ObjeniousLinesRepository(postgresClientIntranet)
const objTask = new CheckObjeniousRequests( const objTask = new CheckObjeniousRequests(
operationRepository, operationRepository,
@@ -25,23 +31,28 @@ async function startCron() {
httpClient, httpClient,
) )
await objTask.getPendingOperations() const volcadoLineasTask = new TaskVolcadoLineas(httpClient, objeniousLineRepository)
const PERIODO_PETICIONES = 10 * 60 * 60
const interval = setInterval(async () => { const interval = setInterval(async () => {
console.log("Updating...") try {
await objTask.getPendingOperations() await objTask.getPendingOperations()
console.log("Update finished") } catch (e) {
}, 10 * 60 * 1000) console.error("[x] Error de actualizacion de las lineas ")
/*
const task = cron.createTask("* * * * *", async () => {
} }
, { }, PERIODO_PETICIONES)
...commonSettings,
name: "Test" const PERIODO_VOLCADO = 60 * 60 * 1000
}) const volcadoInterval = setInterval(async () => {
*/ try {
await volcadoLineasTask.loadLines()
} catch (e) {
console.error("[x] Volcado de lineas de Objenious Fallido", e)
}
}, PERIODO_VOLCADO)
await volcadoLineasTask.loadLines()
//await objTask.getPendingOperations()
} }

View File

@@ -0,0 +1,59 @@
import test, { after, before, describe } from "node:test";
import { CreateObjeniousLineDTO } from "sim-shared/domain/objeniousLine.js";
import { ObjeniousLinesRepository } from "./ObjeniousLinesRepository.js";
import { postgrClient } from "../config/postgreConfig.js";
import assert from "node:assert";
describe("Line insertion test", async () => {
    //const pgClient = postgreClientIntranet
    const pgClient = postgrClient // In prod use the intranet client to hit the other database

    const lineRepository = new ObjeniousLinesRepository(pgClient)

    // Shared fixture. Tests must NOT mutate it — clone it instead — so every
    // test starts from a known state and the hash comparisons stay meaningful.
    const lineaTest: CreateObjeniousLineDTO = {
        simId: 1234,
        iccid: "9999999999999",
        msisdn: "34654674732",
        imei: "219789481293",
        imeiChangeDate: new Date(),
        offerCode: "SAVEFAMILY1",
        status: "ACTIVATED",
        preactivationDate: new Date(),
        activationDate: new Date(),
        commercialStatus: "test",
        commercialStatusDate: new Date(),
        billingStatus: "test",
        billingStatusChangeDate: new Date(),
        billingActivationDate: new Date(),
        createDate: new Date(),
        raw: { test: "test" } as any // Payload content is irrelevant for this test
    }

    // Clean up before and after tests to ensure isolation
    const cleanup = async () => {
        await pgClient.query("DELETE FROM objenious_lines WHERE simId = 1234");
    };

    before(async () => {
        await cleanup()
    })

    after(async () => {
        await cleanup()
    })

    test("Should insert new line", async () => {
        const res = await lineRepository.insertOrUpdate(lineaTest)
        assert.ok(res != undefined, "The line wasn't created")
    })

    test("Should not update a line if the hash is the same", async () => {
        const res = await lineRepository.insertOrUpdate(lineaTest)
        assert.ok(res == undefined, "The line was updated even though the hash did not change")
    })

    test("Should update a line if the hash changes", async () => {
        // Fix: mutate the CLONE (the original code mutated the shared
        // lineaTest fixture and left `updated` unused), and push the new
        // timestamp 1s forward so the serialized value — and therefore the
        // hash — is guaranteed to differ even within the same millisecond.
        const updated = structuredClone(lineaTest)
        updated.billingActivationDate = new Date(Date.now() + 1000)
        const res = await lineRepository.insertOrUpdate(updated)
        assert.ok(res != undefined, "The line was not updated after the hash changed")
    })
})

View File

@@ -0,0 +1,112 @@
/**
 * Repository for dumping Objenious lines into the intranet database.
 * Intended exclusively for the dump ("volcado") task — do not reuse elsewhere.
 */
import { createHash } from "node:crypto";
import { PoolClient } from "pg";
import { CreateObjeniousLineDTO } from "sim-shared/domain/objeniousLine.js";
import { PgClient } from "sim-shared/infrastructure/PgClient.js";

export class ObjeniousLinesRepository {
    constructor(
        private pgClient: PgClient
    ) {
    }

    /**
     * Computes a sha256 (base64url) digest over the JSON-serialized DTO.
     * Used by insertOrUpdate to skip rows that have not changed.
     * Returns undefined when serialization fails (e.g. circular references),
     * which makes the caller skip the row.
     * NOTE(review): the digest depends on the DTO's property order as built
     * by the caller; identical data with different key order would re-update.
     */
    private generateLineHash(data: CreateObjeniousLineDTO) {
        try {
            const lineStr = JSON.stringify(data)
            const hash = createHash("sha256").update(lineStr).digest("base64url")
            return hash
        } catch (e) {
            console.error("[x] Error generando el hash de la linea", data)
            return undefined
        }
    }

    /**
     * Upserts one line, keyed on simId. The UPDATE branch only fires when the
     * stored hash differs from the incoming one (IS DISTINCT FROM handles
     * NULL hashes), so:
     *   - returns { id } for a fresh insert or an actual update,
     *   - returns undefined for an unchanged row (no RETURNING row) or when
     *     the hash could not be generated.
     * Rethrows database errors after logging them.
     */
    public async insertOrUpdate(data: CreateObjeniousLineDTO) {
        const query = `
            INSERT INTO objenious_lines (
                simId,
                iccid,
                msisdn,
                imei,
                imeiChangeDate,
                offerCode,
                status,
                preactivationDate,
                activationDate,
                commercialStatus,
                commercialStatusDate,
                billingStatus,
                billingStatusChangeDate,
                billingActivationDate,
                createDate,
                raw,
                hash
            ) VALUES (
                $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17
            )
            ON CONFLICT (simId)
            DO UPDATE SET
                iccid = EXCLUDED.iccid,
                msisdn = EXCLUDED.msisdn,
                imei = EXCLUDED.imei,
                imeiChangeDate = EXCLUDED.imeiChangeDate,
                offerCode = EXCLUDED.offerCode,
                status = EXCLUDED.status,
                preactivationDate = EXCLUDED.preactivationDate,
                activationDate = EXCLUDED.activationDate,
                commercialStatus = EXCLUDED.commercialStatus,
                commercialStatusDate = EXCLUDED.commercialStatusDate,
                billingStatus = EXCLUDED.billingStatus,
                billingStatusChangeDate = EXCLUDED.billingStatusChangeDate,
                billingActivationDate = EXCLUDED.billingActivationDate,
                raw = EXCLUDED.raw,
                hash = EXCLUDED.hash
            WHERE objenious_lines.hash IS DISTINCT FROM EXCLUDED.hash
            RETURNING id;
        `;

        // Hash is computed BEFORE the createDate default below is applied, so
        // a missing createDate does not change the digest between runs.
        const lineHash = this.generateLineHash(data)
        if (lineHash == undefined) {
            console.error("[x] Ignorando linea ", data)
            return;
        }

        const values = [
            data.simId,
            data.iccid,
            data.msisdn,
            data.imei,
            data.imeiChangeDate,
            data.offerCode,
            data.status,
            data.preactivationDate,
            data.activationDate,
            data.commercialStatus,
            data.commercialStatusDate,
            data.billingStatus,
            data.billingStatusChangeDate,
            data.billingActivationDate,
            data.createDate || new Date(), // default to "now" when the API omits it
            JSON.stringify(data.raw), // the pg driver needs a string (or plain object) for JSONB
            lineHash
        ];

        let client: PoolClient | undefined = undefined;
        try {
            client = await this.pgClient.connect();
            const res = await client.query<{ id: number }>(query, values);
            return res.rows[0];
        } catch (err) {
            console.error('Error en la inserción:', err);
            throw err;
        } finally {
            // Always return the connection to the pool, even on error.
            if (client != undefined) {
                client.release()
            }
        }
    }
}

View File

@@ -5,20 +5,6 @@
"description": "", "description": "",
"main": "index.ts", "main": "index.ts",
"imports": { "imports": {
"#config/*.js": {
"types": "./config/*.ts",
"default": "./config/*.js"
},
"#config/*": {
"types": "./config/*.ts",
"default": "./config/*.js"
},
"#shared/*.js": {
"default": "../sim-shared/*.js"
},
"#shared/*": {
"default": "../sim-shared/*.js"
},
"#adapters/*.js": { "#adapters/*.js": {
"types": "./infrastructure/*.ts", "types": "./infrastructure/*.ts",
"default": "./infrastructure/*.js" "default": "./infrastructure/*.js"
@@ -45,7 +31,7 @@
} }
}, },
"scripts": { "scripts": {
"test": "echo \"Error: no test specified\" && exit 1", "test": "node --import tsx --test ./**/*.test.ts",
"build": "tsc --build && tsc-alias -p tsconfig.json && cp package.json ../../dist/packages/sim-objenious-cron/", "build": "tsc --build && tsc-alias -p tsconfig.json && cp package.json ../../dist/packages/sim-objenious-cron/",
"dev": "tsx watch index.ts", "dev": "tsx watch index.ts",
"start": "node ../../dist/packages/sim-objenious-cron/index.js" "start": "node ../../dist/packages/sim-objenious-cron/index.js"

View File

@@ -1,4 +1,4 @@
import { env } from "#config/env/index.js"; import { env } from "../config/env/index.js";
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"; import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js";
import axios from "axios"; import axios from "axios";
import { IOperationsRepository, Objenious, ObjeniousOperation, ObjeniousOperationChange, StatusEnum } from "sim-shared/domain/operationsRepository.port.js"; import { IOperationsRepository, Objenious, ObjeniousOperation, ObjeniousOperationChange, StatusEnum } from "sim-shared/domain/operationsRepository.port.js";
@@ -16,6 +16,7 @@ export class CheckObjeniousRequests {
* TODO: meter a una funcion a parte task con los 3 pasos * TODO: meter a una funcion a parte task con los 3 pasos
*/ */
public async getPendingOperations() { public async getPendingOperations() {
console.log("[i] Inicio revision de peticiones")
// 1. Se obtienen todas las operaciones pendientes de la BDD // 1. Se obtienen todas las operaciones pendientes de la BDD
const pendingOperations = await this.operationsRepository.getPendingOperations() const pendingOperations = await this.operationsRepository.getPendingOperations()
@@ -49,11 +50,14 @@ export class CheckObjeniousRequests {
console.log("[cron] Solicitando status para", merged.map(e => e.id)) console.log("[cron] Solicitando status para", merged.map(e => e.id))
const result = await this.getMassActionsStatus(merged) const result = await this.getMassActionsStatus(merged)
console.log("[o] Revisión de eventos completa")
} }
/** /**
* Para una lista de operaciones **con mass_action_id** se comprueba si han tenido alguna actualizacion * Para una lista de operaciones **con mass_action_id** se comprueba si han tenido alguna actualizacion
* Devuelve el numero de operaciones comprobadas. * Devuelve el numero de operaciones comprobadas.
* TODO: Esto va en un repositorio
*/ */
private async getMassActionsStatus(requestList: ObjeniousOperation[]) { private async getMassActionsStatus(requestList: ObjeniousOperation[]) {
if (requestList.length == 0) return 0; if (requestList.length == 0) return 0;
@@ -119,9 +123,6 @@ export class CheckObjeniousRequests {
if (uorStatus == "finished") { if (uorStatus == "finished") {
console.log(" ****> Status", uorStatus) console.log(" ****> Status", uorStatus)
if (uorStatus != "finished") {
console.error("!!! Notificando estado no finished")
}
const targetIccids = originalAction.iccids const targetIccids = originalAction.iccids
const lineData = await this.getLineData(targetIccids) const lineData = await this.getLineData(targetIccids)
console.log("[i] lineData", lineData.content[0]) console.log("[i] lineData", lineData.content[0])
@@ -215,7 +216,7 @@ export class CheckObjeniousRequests {
const PATH = "/actions/requests/" const PATH = "/actions/requests/"
const operationsList = structuredClone(requestList) const operationsList = structuredClone(requestList)
// TODO: El for es gigantesco hay que simplificar partes
for (const request of operationsList) { for (const request of operationsList) {
if (request.id == undefined) continue; if (request.id == undefined) continue;
@@ -228,13 +229,50 @@ export class CheckObjeniousRequests {
try { try {
res = await req res = await req
} catch (e) { } catch (e) {
console.error("Error comprobando el estado de ", request, e) console.error("[x] Error comprobando el estado de ", request, e)
//todo actualizar el estado para incluir el error continue;
}
// 2. Casos de error o id no generada
if (res.data.massActionIds.length == 0) {
// Si no hay es que *puede* que haya un problema o no se ha generado todavia
const reports = res.data.actionRequestReports
// Se entiende que no hay report ni id = está a la espera
if (reports.length == 0) continue;
// ! Hay minimo un report -> se considera error y se para
const updateData: ObjeniousOperationChange = {
operation_id: request.id,
new_status: "error",
error: JSON.stringify(reports[0].actionRequestReportDataDTOs)
}
const updateRes = await this.operationsRepository.updateOperation(updateData)
if (updateRes.error != undefined) {
console.error("[x] Error actualizando el estado de la operacion", updateData.error)
}
if (request.correlation_id != undefined) {
this.orderRepository.errorOrder({
correlation_id: request.correlation_id,
status: "failed",
error: "MassId no obtenida",
reason: "MassId no obtenida",
stackTrace: JSON.stringify(reports[0].actionRequestReportDataDTOs)
}).then(e => {
if (e.error != undefined) {
console.error("[x] Error actualizando el estado del Order con correlation_id: ", request.correlation_id)
console.error(e.error)
}
}).catch(e => {
console.error("[x] Error actualizando el estado del Order con correlation_id: ", request.correlation_id)
})
}
continue; continue;
} }
// 2. Modificacion del massId si ha habido un cambio
const massActionId = res.data.massActionIds[0] const massActionId = res.data.massActionIds[0]
// 3. Modificacion del massId si ha habido un cambio
try { try {
if (res.status == 200 && res.data != undefined && massActionId != undefined) { if (res.status == 200 && res.data != undefined && massActionId != undefined) {
const updateData: ObjeniousOperationChange = { const updateData: ObjeniousOperationChange = {
@@ -248,7 +286,7 @@ export class CheckObjeniousRequests {
request.mass_action_id = String(massActionId) request.mass_action_id = String(massActionId)
} }
} catch (e) { } catch (e) {
console.log("Error actualizando el estado de ", request) console.log("[x] Error actualizando el estado de ", request)
continue; continue;
} }
} }
@@ -270,7 +308,17 @@ export class CheckObjeniousRequests {
"x-apikey-sim-activation": env.SIM_ACTIVATION_API_KEY "x-apikey-sim-activation": env.SIM_ACTIVATION_API_KEY
} }
}) })
await req try {
const res = await req
if (res.status != 200) {
console.error("[x] Error enviando el mail de confirmacion para ", operation, " status ", res.status, res.statusText)
}
} catch (e) {
console.error("[x] Error enviando el mail de confirmacion para ", operation)
console.error(e)
}
} }
} }

View File

@@ -0,0 +1,133 @@
import assert from "node:assert";
import { lineToCreateLineDto, ObjeniousLine, ObjeniousLineResponse } from "sim-shared/domain/objeniousLine.js";
import { tryCatch, Result } from "sim-shared/domain/Result.js";
import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js";
import { ObjeniousLinesRepository } from "../infranstructure/ObjeniousLinesRepository.js";
import { AxiosResponse } from "axios";
import { constants } from "node:buffer";
// Page size requested from the Objenious /lines endpoint per call.
// NOTE(review): presumably the API's maximum — confirm against the API docs.
const MAX_PAGE_SIZE = 100

/**
 * Periodic task that pages through the Objenious /lines endpoint and upserts
 * every line into the intranet database via ObjeniousLinesRepository.
 */
export class TaskVolcadoLineas {
    constructor(
        private readonly httpClient: HttpClient,
        private readonly linesRepository: ObjeniousLinesRepository,
    ) {
    }

    /**
     * Async generator yielding one page of lines per iteration.
     * Paging state (currentPage, totalPages) is captured in the closure:
     * the first page is always fetched eagerly (it establishes totalPages),
     * then the remaining pages are yielded lazily up to a FIXED copy of
     * totalPages taken after page 0, which guards against infinite loops if
     * the API keeps growing the total. Each yielded Result carries either a
     * page of lines or an error string; the final `return` is an empty page.
     * TODO: move this into the repository.
     */
    private async * getLinesByStatus(args?: {
        pageSize?: number,
        pageNumber?: number,
        status?: string
    }): AsyncGenerator<Result<string, ObjeniousLine[]>, Result<string, ObjeniousLine[]>, any> {
        const path = "/lines"
        const pageSize = args?.pageSize ?? MAX_PAGE_SIZE;
        let currentPage = args?.pageNumber ?? 0;
        let totalPages: number | undefined = undefined; // unknown until the first page is read
        const params: Record<string, string | number> = {}

        // Fetches a single page; also refreshes `totalPages` from the response.
        const loadNextLine = async (page: number): Promise<Result<string, ObjeniousLine[]>> => {
            if (args?.status != undefined) params["simStatus"] = args.status
            params["pageSize"] = pageSize
            params["pageNumber"] = page
            console.log("Params", params)
            console.log(`[i] Cargando pagina ${currentPage} de ${totalPages ?? "(desc)"}`)
            const nextPage = await tryCatch<AxiosResponse<ObjeniousLineResponse>>(this.httpClient.client.get(path, {
                params: params
            }))
            if (nextPage.error != undefined) {
                console.error(nextPage.error.msg)
                return {
                    error: nextPage.error.msg.message
                }
            }
            // Refresh the page count for the next iteration.
            console.log(`[i] Página ${currentPage} completa, total: ${nextPage.data.data.totalPages}`)
            totalPages = nextPage.data.data.totalPages
            return {
                data: nextPage.data.data.content
            }
        }

        // The first page always runs eagerly — it seeds `totalPages`.
        const lines = await loadNextLine(currentPage)
        if (lines.error != undefined) {
            console.error("[x] Error obteniendo las lineas, cancelando operación");
            return {
                error: "Error cargando lineas"
            }
        }
        currentPage++;
        yield {
            data: lines.data
        }

        // Frozen copy so a drifting API total cannot cause an infinite loop.
        const maxPages = totalPages
        assert.ok(maxPages != undefined, "No se ha defindo el numero de paginas") // Should never happen, but this prevents infinite loops
        console.log("maxPages", maxPages)
        for (let i = currentPage; i < maxPages!; i++) {
            console.log("Bucle i:", i, "page: ", currentPage)
            yield await loadNextLine(currentPage);
            currentPage++;
        }
        return {
            data: []
        }
    }

    /**
     * Upserts a page of API lines one by one (sequentially, to keep DB load
     * predictable). The ids of inserted/updated rows are collected but
     * currently unused.
     */
    private async saveLines(lines: ObjeniousLine[]) {
        const linesToCreate = lines.map(lineToCreateLineDto)
        let created: number[] = []
        for (const line of linesToCreate) {
            // Switch to Promise.all if this turns out to be slow
            const res = await this.linesRepository.insertOrUpdate(line)
            if (res?.id != undefined)
                created.push(res.id)
        }
    }

    /**
     * Entry point of the task: drains the generator page by page, persisting
     * each page before requesting the next, and aborts on the first page
     * error. Only one page is held in memory at a time.
     */
    public async loadLines() {
        console.log("[i] Iniciando task de volcado de lineas de Objenious")
        // Loads pages one at a time; keep an eye on memory usage anyway.
        const linesIterator = this.getLinesByStatus()
        let lines = await linesIterator.next()
        if (lines.value.error != undefined || lines.value.data == undefined) {
            console.error("[x] Error cargando las lineas a volcar", lines.value.error)
            return;
        }
        await this.saveLines(lines.value.data)
        while (!lines.done) {
            // NOTE(review): stray empty console.log — prints a blank line per page
            console.log()
            lines = await linesIterator.next()
            if (lines.value.error != undefined || lines.value.data == undefined) {
                console.error("[x] Error cargando las lineas a volcar", lines.value.error)
                return;
            }
            await this.saveLines(lines.value.data)
        }
        console.log("[i] Terminado task de volcado de lineas de Objenious")
    }
}

View File

@@ -62,11 +62,14 @@ export type CreateOrderDTO = Pick<
'correlation_id' | 'exchange' | 'routing_key' | 'order_type' | 'payload' | 'webhook_host' | 'webhook_endpoint' 'correlation_id' | 'exchange' | 'routing_key' | 'order_type' | 'payload' | 'webhook_host' | 'webhook_endpoint'
>; >;
export type UpdateOrderDTO = type IdOrCorrelationID =
( (
{ id: number, correlation_id?: never } | { id: number, correlation_id?: never } |
{ id?: never, correlation_id: string } { id?: never, correlation_id: string }
) )
export type UpdateOrderDTO =
IdOrCorrelationID
& &
{ {
new_status: OrderStatus, new_status: OrderStatus,
@@ -74,12 +77,20 @@ export type UpdateOrderDTO =
} }
export type FinishOrderDTO = export type FinishOrderDTO =
( IdOrCorrelationID
{ id: number, correlation_id?: never } |
{ id?: never, correlation_id: string }
)
& &
{ {
reason?: string reason?: string
} }
export type ErrorOrderDTO =
IdOrCorrelationID
&
{
status: "failed" | "dlx",
reason: string,
error?: string,
stackTrace?: string
}

View File

@@ -0,0 +1,144 @@
/** Paged response envelope returned by the Objenious GET /lines endpoint. */
export type ObjeniousLineResponse = {
    content: ObjeniousLine[], // lines in the current page
    offset: number,
    pageNumber: number,
    pageSize: number,
    paged: boolean,
    totalPages: number, // used by the dump task as its paging bound
    totalElements: number
}
// A single SIM line as returned by the Objenious API.
export type ObjeniousLine = {
    identifier: {
        simId: number,
        iccid: string,
        imsi: string,
        msisdn: string,
        amsisdn?: string,
        imei: string
    },
    simCardType: {
        code: string,
        description: string
    },
    device: {
        imei: string,
        imeiChangeDate: string, // ISO date string
        deviceReference?: string | null,
        manufacturer?: string | null,
    },
    customerAccount: {
        code: string,
        label: string,
        address: {
            address1: string,
            address2: string,
            address3: string,
            zipCode: string,
            city: string,
            country: string,
            state?: string | null
        }
    },
    offer: {
        code: string,
        description: string,
    },
    party: {
        name: string,
        code: string,
        contractReference: string,
        partyType: string,
    },
    // Six free-form label/value slots exposed by the API.
    lineCustomFields: {
        custom1: {
            label: string | null,
            value: string | null
        },
        custom2: {
            label: string | null,
            value: string | null
        },
        custom3: {
            label: string | null,
            value: string | null
        },
        custom4: {
            label: string | null,
            value: string | null
        },
        custom5: {
            label: string | null,
            value: string | null
        },
        custom6: {
            label: string | null,
            value: string | null
        }
    },
    status: {
        status: string,
        preactivationDate: string | null, // date only, e.g. "2026-03-17"
        activationDate: string | null, // ISO timestamp, e.g. "2026-03-17T11:04:11.408+00:00"
        commercialStatus: string, // e.g. "test"
        commercialStatusDate: string, // ISO timestamp, e.g. "2026-03-17T11:41:01.493+00:00"
        networkStatus: string, // e.g. "ACTIVATED"
        billingStatus: string, // e.g. "TEST"
        billingStatusChangeDate: string | null, // ISO timestamp, e.g. "2026-03-17T11:01:00.276+00:00"
        billingActivationDate: string | null // NOTE(review): no trailing comma — valid (newline separates members) but fragile
        createdDate: string | null, // ISO timestamp, e.g. "2026-01-30T01:50:02.060+00:00"
    },
    services: string | null
};
// Database-row representation of an Objenious line; dates are parsed to Date objects.
export type ObjeniousLineDb = {
    id: number; // autogenerated primary key
    simId?: number;
    iccid: string; // SIM card identifier — the only mandatory field besides raw
    msisdn?: string;
    imei?: string;
    imeiChangeDate?: Date;
    offerCode?: string;
    status?: string;
    preactivationDate?: Date | null;
    activationDate?: Date | null;
    commercialStatus?: string;
    commercialStatusDate?: Date | null;
    billingStatus?: string;
    billingStatusChangeDate?: Date | null;
    billingActivationDate?: Date | null;
    createDate?: Date | null;
    raw: ObjeniousLine; // complete original API payload
}
// Insertion DTO (omits the autogenerated ID).
export type CreateObjeniousLineDTO = Omit<ObjeniousLineDb, 'id'>;
/**
 * Maps an Objenious API line to the DTO used for database insertion,
 * converting ISO date strings to Date objects (preserving null).
 *
 * @param line - line as returned by the Objenious API
 * @returns the insertion DTO; `raw` keeps the full original payload
 */
export function lineToCreateLineDto(line: ObjeniousLine): CreateObjeniousLineDTO {
    // Converts an ISO date string to a Date, passing null through unchanged.
    const dateOrNull = (data: string | null) => {
        if (data == null) return null;
        return new Date(data)
    }
    const transformed: CreateObjeniousLineDTO = {
        simId: line.identifier.simId,
        iccid: line.identifier.iccid,
        msisdn: line.identifier.msisdn,
        imei: line.identifier.imei,
        imeiChangeDate: new Date(line.device.imeiChangeDate),
        offerCode: line.offer.code,
        status: line.status.status,
        preactivationDate: dateOrNull(line.status.preactivationDate),
        activationDate: dateOrNull(line.status.activationDate),
        commercialStatus: line.status.commercialStatus,
        commercialStatusDate: dateOrNull(line.status.commercialStatusDate),
        billingStatus: line.status.billingStatus,
        // Bug fix: the next three fields previously all copied activationDate
        // (copy-paste error); each now reads its own source field.
        billingStatusChangeDate: dateOrNull(line.status.billingStatusChangeDate),
        billingActivationDate: dateOrNull(line.status.billingActivationDate),
        createDate: dateOrNull(line.status.createdDate),
        raw: line
    }
    return transformed;
}

View File

@@ -46,9 +46,33 @@ export namespace Objenious {
created: string, created: string,
status: "NEW" | "RUNNING" | "OK" | "KO" | "REPLAYED" | "CANCELLED" | "CLOSED" | "DISABLED", status: "NEW" | "RUNNING" | "OK" | "KO" | "REPLAYED" | "CANCELLED" | "CLOSED" | "DISABLED",
statusDate: string, statusDate: string,
actionType: "PREACTIVATION_AND_ACTIVATION" | string, // todo: añadir el resto actionType: ActionType
massActionIds: number[] massActionIds: number[],
actionRequestReports:
{
requestId: string,
actionRequestReportDataDTOs: [
{
data: string,
newData: string | null,
iccid: string,
dataStatus: DataStatus
} }
]
}[],
}
export type DataStatus = "DATA_INVALID_FORMAT" | "DATA_NOT_FOUND" | "DATA_NOT_ACTIVATED" | "SERVICE_DATA_NOT_ACTIVATED" |
"DATA_WRONG_STATUS" | "DATA_NOT_AUTHORIZED" | "DATA_CUSTOMER_ACCOUNT_NOT_AUTHORIZED" | "DATA_AMBIGUOUS" |
"NEW_DATA_INVALID_FORMAT" | "NEW_DATA_ALREADY_EXISTS" | "DUPLICATE_DATA" | "DATA_TERMINATION_VALIDATED" |
"DATA_TERMINATION_SECURISED" | "MAX_ALARM_INSTANCE" | "MAX_ALARM_INSTANCE_TO_CATCH_UP" |
"ACTIVATED_LINE_CANNOT_BE_TRANSFERED" | "ESIM_WRONG_STEP" | "ESIM_WRONG_PAIRED_VALUE" |
"ESIM_WRONG_DOWNLOAD_STATE" | "ESIM_WRONG_STATUS" | "ESIM_WRONG_FAMILY" | "ESIM_WRONG_CATEGORY" |
"ENTITY_STATUS_NOT_AUTHORIZED" | "LONG_LIFE_NOT_ALLOWED" | "RCARD_NOT_COMPATIBLE" | "APN_NOT_FOUND" |
"APN_OR_DNN_NOT_FOUND" | "APN_CONFIGURATION_NOT_FOUND" | "APN_CONFIGURATION_INVALID_PARAMETER_FILE" |
"IP_NOT_AVAILABLE" | "RADIUS_FIELD_LENGTH_NOT_ALLOWED" | "RADIUS_LOGIN_OR_PASSWORD_NOT_FOUND" | "RADIUS_PASSWORD_NOT_ALLOWED" |
"RADIUS_LOGIN_NOT_ALLOWED" | "NETWORK_NOT_ACTIVATED" | "CHANGE_CUSTOMER_ACCOUNT_NOT_AllOWED" | "CHANGE_OFFER_NOT_ALLOWED" |
"SIM_NOT_EUICC" | "OFFER_NOT_WSF_PALIER_FLOTTE_FR"
export type ActionType = "PREACTIVATION" | "PREACTIVATION_ACTIVATION" | "ACTIVATION" | export type ActionType = "PREACTIVATION" | "PREACTIVATION_ACTIVATION" | "ACTIVATION" |
"STATUS_CHANGE" | "ICCID_CHANGE" | "EUICC_NOTIFICATION" "STATUS_CHANGE" | "ICCID_CHANGE" | "EUICC_NOTIFICATION"

View File

@@ -2,7 +2,7 @@
* TODO: Usar * TODO: Usar
*/ */
import { PoolClient, QueryResult, QueryResultRow } from "pg"; import { PoolClient, QueryResult, QueryResultRow } from "pg";
import { CreateOrderDTO, FinishOrderDTO, OrderTracking, UpdateOrderDTO } from "../domain/Order.js"; import { CreateOrderDTO, ErrorOrderDTO, FinishOrderDTO, OrderTracking, UpdateOrderDTO } from "../domain/Order.js";
import { Result } from "../domain/Result.js"; import { Result } from "../domain/Result.js";
import { PgClient } from "./PgClient.js"; import { PgClient } from "./PgClient.js";
import assert from "node:assert"; import assert from "node:assert";
@@ -353,22 +353,19 @@ export class OrderRepository {
} }
// TODO: tema de poder filtrar por correlation_id // TODO: tema de poder filtrar por correlation_id
public async errorOrder(args: { public async errorOrder(args: ErrorOrderDTO): Promise<Result<string, OrderTracking<any>>> {
id: number,
status: "failed" | "dlx",
reason: string,
error?: string,
stackTrace?: string
}) {
const client = await this.pgClient.connect(); const client = await this.pgClient.connect();
await client.query('BEGIN'); await client.query('BEGIN');
const idType = ('id' in args) ? "id" : "correlation_id"
const idValue = (args.id != undefined) ? args.id : args.correlation_id
// 1. Se consulta la order de base // 1. Se consulta la order de base
const qCurrentOrder = ` const qCurrentOrder = `
SELECT * FROM order_tracking SELECT * FROM order_tracking
WHERE id = $1 WHERE ${idType} = $1
` `
const vCurrentOrder = [args.id] const vCurrentOrder = [idValue]
const currentOrderResult = await this.getFirst(client.query<OrderTracking<any>>(qCurrentOrder, vCurrentOrder)) const currentOrderResult = await this.getFirst(client.query<OrderTracking<any>>(qCurrentOrder, vCurrentOrder))
@@ -378,6 +375,7 @@ export class OrderRepository {
return currentOrderResult return currentOrderResult
} }
const id = currentOrderResult.data.id // Saco el id para evitar busacr por correlation_id que es mas lento
const currentOrder = currentOrderResult.data! const currentOrder = currentOrderResult.data!
// 3. Si todo ok se actualiza el order // 3. Si todo ok se actualiza el order
@@ -395,7 +393,7 @@ export class OrderRepository {
WHERE id = $1 WHERE id = $1
RETURNING id, status, update_date; RETURNING id, status, update_date;
` `
const vOrderTracking = [args.id, args.status, args.error, args.stackTrace] const vOrderTracking = [id, args.status, args.error, args.stackTrace]
const updatedOrderResult = await this.getFirst( const updatedOrderResult = await this.getFirst(
client.query<{ id: number, status: string, update_date: string }>(uOrderTracking, vOrderTracking) client.query<{ id: number, status: string, update_date: string }>(uOrderTracking, vOrderTracking)
) )

View File

@@ -1,12 +1,3 @@
#/bin/bash #/bin/bash
rm deployment/database/init.sql
# init sql debe juntar todos los scripts de "base" (sin contar migraciones)
cat deployment/database/base/*.sql >deployment/database/init.sql
#cp deployment/database/esquema_final* deployment/database/init.sql
# compatibilidad con postgresql < 17
sed -i '/\\restrict/d' deployment/database/init.sql
sed -i '/\\unrestrict/d' deployment/database/init.sql
docker compose -f deployment/local/docker/docker-compose.yaml --project-directory ./ up --watch docker compose -f deployment/local/docker/docker-compose.yaml --project-directory ./ up --watch

104
yarn.lock
View File

@@ -452,6 +452,18 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"@sf-alvar/db-migrate@npm:1.0.3":
version: 1.0.3
resolution: "@sf-alvar/db-migrate@npm:1.0.3::__archiveUrl=https%3A%2F%2Fgit.savefamilygps.net%2Fapi%2Fpackages%2FSaveFamily%2Fnpm%2F%2540sf-alvar%252Fdb-migrate%2F-%2F1.0.3%2Fdb-migrate-1.0.3.tgz"
dependencies:
pg: "npm:^8.18.0"
yargs: "npm:^18.0.0"
bin:
db-migrate: lib/index.js
checksum: 10/2b5745a5ce60456fc7fee1e6a8580978a520fedd8abbbc695557847cdf2b36aa5e1d795721ad35bc151fc9373dfa023bde73d6f43ba412b17293a1822c09fe6b
languageName: node
linkType: hard
"@standard-schema/spec@npm:^1.0.0": "@standard-schema/spec@npm:^1.0.0":
version: 1.1.0 version: 1.1.0
resolution: "@standard-schema/spec@npm:1.1.0" resolution: "@standard-schema/spec@npm:1.1.0"
@@ -788,6 +800,13 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"ansi-regex@npm:^6.2.2":
version: 6.2.2
resolution: "ansi-regex@npm:6.2.2"
checksum: 10/9b17ce2c6daecc75bcd5966b9ad672c23b184dc3ed9bf3c98a0702f0d2f736c15c10d461913568f2cf527a5e64291c7473358885dd493305c84a1cfed66ba94f
languageName: node
linkType: hard
"ansi-styles@npm:^4.0.0, ansi-styles@npm:^4.1.0": "ansi-styles@npm:^4.0.0, ansi-styles@npm:^4.1.0":
version: 4.3.0 version: 4.3.0
resolution: "ansi-styles@npm:4.3.0" resolution: "ansi-styles@npm:4.3.0"
@@ -797,6 +816,13 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"ansi-styles@npm:^6.2.1":
version: 6.2.3
resolution: "ansi-styles@npm:6.2.3"
checksum: 10/c49dad7639f3e48859bd51824c93b9eb0db628afc243c51c3dd2410c4a15ede1a83881c6c7341aa2b159c4f90c11befb38f2ba848c07c66c9f9de4bcd7cb9f30
languageName: node
linkType: hard
"anymatch@npm:~3.1.2": "anymatch@npm:~3.1.2":
version: 3.1.3 version: 3.1.3
resolution: "anymatch@npm:3.1.3" resolution: "anymatch@npm:3.1.3"
@@ -1016,6 +1042,17 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"cliui@npm:^9.0.1":
version: 9.0.1
resolution: "cliui@npm:9.0.1"
dependencies:
string-width: "npm:^7.2.0"
strip-ansi: "npm:^7.1.0"
wrap-ansi: "npm:^9.0.0"
checksum: 10/df43d8d1c6e3254cbb64b1905310d5f6672c595496a3cbe76946c6d24777136886470686f2772ac9edfe547a74bb70e8017530b3554715aee119efd7752fc0d9
languageName: node
linkType: hard
"color-convert@npm:^2.0.1": "color-convert@npm:^2.0.1":
version: 2.0.1 version: 2.0.1
resolution: "color-convert@npm:2.0.1" resolution: "color-convert@npm:2.0.1"
@@ -1187,6 +1224,13 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"emoji-regex@npm:^10.3.0":
version: 10.6.0
resolution: "emoji-regex@npm:10.6.0"
checksum: 10/98cc0b0e1daed1ed25afbf69dcb921fee00f712f51aab93aa1547e4e4e8171725cc4f0098aaa645b4f611a19da11ec9f4623eb6ff2b72314b39a8f2ae7c12bf2
languageName: node
linkType: hard
"emoji-regex@npm:^8.0.0": "emoji-regex@npm:^8.0.0":
version: 8.0.0 version: 8.0.0
resolution: "emoji-regex@npm:8.0.0" resolution: "emoji-regex@npm:8.0.0"
@@ -1580,6 +1624,13 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"get-east-asian-width@npm:^1.0.0":
version: 1.5.0
resolution: "get-east-asian-width@npm:1.5.0"
checksum: 10/60bc34cd1e975055ab99f0f177e31bed3e516ff7cee9c536474383954a976abaa6b94a51d99ad158ef1e372790fa096cab7d07f166bb0778f6587954c0fbe946
languageName: node
linkType: hard
"get-intrinsic@npm:^1.2.5, get-intrinsic@npm:^1.2.6, get-intrinsic@npm:^1.3.0": "get-intrinsic@npm:^1.2.5, get-intrinsic@npm:^1.2.6, get-intrinsic@npm:^1.3.0":
version: 1.3.1 version: 1.3.1
resolution: "get-intrinsic@npm:1.3.1" resolution: "get-intrinsic@npm:1.3.1"
@@ -2805,6 +2856,7 @@ __metadata:
version: 0.0.0-use.local version: 0.0.0-use.local
resolution: "sim-eventos@workspace:." resolution: "sim-eventos@workspace:."
dependencies: dependencies:
"@sf-alvar/db-migrate": "npm:1.0.3"
"@tsconfig/node22": "npm:^22.0.5" "@tsconfig/node22": "npm:^22.0.5"
"@types/amqplib": "npm:^0.10.8" "@types/amqplib": "npm:^0.10.8"
"@types/cors": "npm:^2.8.19" "@types/cors": "npm:^2.8.19"
@@ -2990,6 +3042,17 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"string-width@npm:^7.0.0, string-width@npm:^7.2.0":
version: 7.2.0
resolution: "string-width@npm:7.2.0"
dependencies:
emoji-regex: "npm:^10.3.0"
get-east-asian-width: "npm:^1.0.0"
strip-ansi: "npm:^7.1.0"
checksum: 10/42f9e82f61314904a81393f6ef75b832c39f39761797250de68c041d8ba4df2ef80db49ab6cd3a292923a6f0f409b8c9980d120f7d32c820b4a8a84a2598a295
languageName: node
linkType: hard
"strip-ansi@npm:^6.0.0, strip-ansi@npm:^6.0.1": "strip-ansi@npm:^6.0.0, strip-ansi@npm:^6.0.1":
version: 6.0.1 version: 6.0.1
resolution: "strip-ansi@npm:6.0.1" resolution: "strip-ansi@npm:6.0.1"
@@ -2999,6 +3062,15 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"strip-ansi@npm:^7.1.0":
version: 7.2.0
resolution: "strip-ansi@npm:7.2.0"
dependencies:
ansi-regex: "npm:^6.2.2"
checksum: 10/96da3bc6d73cfba1218625a3d66cf7d37a69bf0920d8735b28f9eeaafcdb6c1fe8440e1ae9eb1ba0ca355dbe8702da872e105e2e939fa93e7851b3cb5dd7d316
languageName: node
linkType: hard
"superagent@npm:^10.3.0": "superagent@npm:^10.3.0":
version: 10.3.0 version: 10.3.0
resolution: "superagent@npm:10.3.0" resolution: "superagent@npm:10.3.0"
@@ -3418,6 +3490,17 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"wrap-ansi@npm:^9.0.0":
version: 9.0.2
resolution: "wrap-ansi@npm:9.0.2"
dependencies:
ansi-styles: "npm:^6.2.1"
string-width: "npm:^7.0.0"
strip-ansi: "npm:^7.1.0"
checksum: 10/f3907e1ea9717404ca53a338fa5a017c2121550c3a5305180e2bc08c03e21aa45068df55b0d7676bf57be1880ba51a84458c17241ebedea485fafa9ef16b4024
languageName: node
linkType: hard
"wrappy@npm:1": "wrappy@npm:1":
version: 1.0.2 version: 1.0.2
resolution: "wrappy@npm:1.0.2" resolution: "wrappy@npm:1.0.2"
@@ -3460,6 +3543,13 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"yargs-parser@npm:^22.0.0":
version: 22.0.0
resolution: "yargs-parser@npm:22.0.0"
checksum: 10/f13c42bad6ebed1a587a72f2db5694f5fa772bcaf409a701691d13cf74eb5adfcf61a2611de08807e319b829d3e5e6e1578b16ebe174cae8e8be3bf7b8e7a19e
languageName: node
linkType: hard
"yargs@npm:17.7.2": "yargs@npm:17.7.2":
version: 17.7.2 version: 17.7.2
resolution: "yargs@npm:17.7.2" resolution: "yargs@npm:17.7.2"
@@ -3474,3 +3564,17 @@ __metadata:
checksum: 10/abb3e37678d6e38ea85485ed86ebe0d1e3464c640d7d9069805ea0da12f69d5a32df8e5625e370f9c96dd1c2dc088ab2d0a4dd32af18222ef3c4224a19471576 checksum: 10/abb3e37678d6e38ea85485ed86ebe0d1e3464c640d7d9069805ea0da12f69d5a32df8e5625e370f9c96dd1c2dc088ab2d0a4dd32af18222ef3c4224a19471576
languageName: node languageName: node
linkType: hard linkType: hard
"yargs@npm:^18.0.0":
version: 18.0.0
resolution: "yargs@npm:18.0.0"
dependencies:
cliui: "npm:^9.0.1"
escalade: "npm:^3.1.1"
get-caller-file: "npm:^2.0.5"
string-width: "npm:^7.2.0"
y18n: "npm:^5.0.5"
yargs-parser: "npm:^22.0.0"
checksum: 10/5af36234871390386b31cac99f00e79fcbc2ead858a61b30a8ca381c5fde5df8af0b407c36b000d3f774bcbe4aec5833f2f1c915f6ddc49ce97b78176b651801
languageName: node
linkType: hard