Merge branch 'main' into seguimiento-tareas

This commit is contained in:
2026-02-13 10:57:54 +01:00
26 changed files with 540 additions and 40 deletions

View File

@@ -0,0 +1,22 @@
# --- Release image ---
FROM node:22-alpine AS release
WORKDIR /home/node/app
RUN corepack enable
COPY ./dist/packages ./packages
COPY ./package.json ./
# Force node-modules linker (no .yarnrc.yml in build context)
RUN echo 'nodeLinker: node-modules' > .yarnrc.yml
RUN yarn install
# Workspaces are built into dist/packages at build time; recreate that layout
# so runtime "yarn start" resolves the same paths as in development.
RUN mkdir -p dist && ln -sf ../packages dist/packages
COPY ./entrypoint.sh ./
RUN chmod +x entrypoint.sh
# Declare the build arg so the PORT passed from docker-compose (build.args)
# is actually visible here. Without this ARG the compose-supplied value is
# silently ignored and ${PORT:-3000} always falls back to 3000.
ARG PORT=3000
EXPOSE ${PORT:-3000}
ENTRYPOINT ["./entrypoint.sh"]

View File

@@ -0,0 +1,102 @@
name: sim-eventos

networks:
  savefamily:
    external: true
  proxy:
    external: true
  internal:
    driver: bridge

services:
  rabbitmq-sim-broker:
    container_name: rabbitmq-sim-broker
    image: "rabbitmq:4.2.2-management"
    expose:
      - 5672   # AMQP
      - 15672  # management UI (published via traefik below)
    env_file:
      - ./.env
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "rabbitmq-diagnostics", "check_port_connectivity"]
      interval: 10s
      timeout: 5s
      retries: 5
    environment:
      RABBITMQ_USER: ${RABBITMQ_USER}
      RABBITMQ_PASSWORD: ${RABBITMQ_PASSWORD}
    # The wrapper substitutes RABBITMQ_USER/PASSWORD into the definitions
    # template before handing off to the stock rabbitmq entrypoint.
    entrypoint: ["bash", "/usr/local/bin/docker-entrypoint-wrapper.sh"]
    command: ["rabbitmq-server"]
    volumes:
      - ./rabbit/docker-entrypoint-wrapper.sh:/usr/local/bin/docker-entrypoint-wrapper.sh:ro
      - ./rabbitmq_plugins/enabled_plugins:/etc/rabbitmq/enabled_plugins:ro
      - ./rabbit/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf:ro
      - ./rabbit/definitions.json:/etc/rabbitmq/definitions.template.json:ro
    networks:
      - internal
      - proxy
    labels:
      - "io.portainer.accesscontrol.teams=develop"
      - "traefik.enable=true"
      - "traefik.http.routers.sf-sims-rabbitmq.entrypoints=web"
      - "traefik.http.routers.sf-sims-rabbitmq.rule=Host(`sf-sims-rabbitmq.savefamilygps.net`)"
      - "traefik.http.middlewares.sf-sims-rabbitmq-https-redirect.redirectscheme.scheme=https"
      - "traefik.http.routers.sf-sims-rabbitmq.middlewares=sf-sims-rabbitmq-https-redirect"
      - "traefik.http.routers.sf-sims-rabbitmq-secure.entrypoints=websecure"
      - "traefik.http.routers.sf-sims-rabbitmq-secure.rule=Host(`sf-sims-rabbitmq.savefamilygps.net`)"
      - "traefik.http.routers.sf-sims-rabbitmq-secure.tls=true"
      - "traefik.http.routers.sf-sims-rabbitmq-secure.service=sf-sims-rabbitmq"
      - "traefik.http.routers.sf-sims-rabbitmq-secure.tls.certresolver=myresolver"
      - "traefik.http.services.sf-sims-rabbitmq.loadbalancer.server.port=15672"
      - "traefik.docker.network=proxy"

  sf-sims-api:
    container_name: sf-sims-api
    build:
      context: .
      dockerfile: Dockerfile
      args:
        PORT: ${PORT:-3000}
    image: sf-sims-api
    env_file:
      - ./.env
    restart: unless-stopped
    depends_on:
      rabbitmq-sim-broker:
        condition: service_healthy
    networks:
      - savefamily
      - proxy
      - internal
    expose:
      # Default to 3000 so the stack still interpolates when PORT is not set,
      # matching the ${PORT:-3000} fallback used by the Dockerfile.
      - ${PORT:-3000}
    volumes:
      - ./.env:/home/node/app/.env:ro
      - ./sim-consumidor-objenious.env:/home/node/app/packages/sim-consumidor-objenious/.env:ro
      - ./sim-objenious-cron.env:/home/node/app/packages/sim-objenious-cron/.env:ro
      - ./obj.pem:/home/node/app/packages/sim-consumidor-objenious/obj.pem:ro
      - ./obj.pem:/home/node/app/packages/sim-objenious-cron/obj.pem:ro
    healthcheck:
      # Alpine image has no curl/wget; use node's built-in fetch against /health.
      test:
        [
          "CMD-SHELL",
          'node -e "fetch(''http://localhost:'' + (process.env.PORT || 3000) + ''/health'').then(r => { if (!r.ok) process.exit(1) }).catch(() => process.exit(1))"',
        ]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 15s
    labels:
      - "io.portainer.accesscontrol.teams=develop"
      - "traefik.enable=true"
      - "traefik.http.routers.sf-sims.entrypoints=web"
      - "traefik.http.routers.sf-sims.rule=Host(`sf-sims.savefamilygps.net`)"
      - "traefik.http.middlewares.sf-sims-https-redirect.redirectscheme.scheme=https"
      - "traefik.http.routers.sf-sims.middlewares=sf-sims-https-redirect"
      - "traefik.http.routers.sf-sims-secure.entrypoints=websecure"
      - "traefik.http.routers.sf-sims-secure.rule=Host(`sf-sims.savefamilygps.net`)"
      - "traefik.http.routers.sf-sims-secure.tls=true"
      - "traefik.http.routers.sf-sims-secure.service=sf-sims"
      - "traefik.http.routers.sf-sims-secure.tls.certresolver=myresolver"
      - "traefik.http.services.sf-sims.loadbalancer.server.port=${PORT:-3000}"
      - "traefik.docker.network=proxy"

View File

@@ -0,0 +1,4 @@
#!/bin/sh
# Container entrypoint: start the app from its install directory.
set -e
# The stray "cd /home" in the previous version was redundant; go straight
# to the app directory.
cd /home/node/app
# exec replaces this shell so node becomes PID 1's direct child and
# receives SIGTERM from "docker stop", allowing graceful shutdown.
exec yarn start

View File

@@ -0,0 +1,9 @@
#!/bin/bash
# Rebuild and restart the sf-sims-api container in place on the host.
# Invoked remotely by the Jenkins pipeline after files are transferred.
cd /mnt/docker-storage/containers/savefamily/sf-sims
# Best-effort teardown: '|| true' keeps the script going when the
# container/image does not exist yet (e.g. first deploy).
docker stop sf-sims-api || true
docker rm sf-sims-api || true
# Remove the old image so the compose build below produces a fresh one
# instead of reusing stale layers under the same tag.
docker rmi sf-sims-api || true
# Recreate only the API service; the RabbitMQ broker keeps running.
docker compose -f docker-compose.yaml up --build -d sf-sims-api

View File

@@ -0,0 +1,111 @@
#!/usr/bin/env groovy
// CI/CD pipeline: install deps, build the yarn workspaces, then push the
// built artifacts + deployment files to the target host over SSH and run
// rebuild.sh there to restart the container.
// NOTE(review): the leading "//" in these paths looks intentional (double
// slash is harmless in POSIX paths) — confirm it is not a typo.
String BASE_REMOTE_PATH = "//home/devops"
String APP_REMOTE_PATH = "//mnt/docker-storage/containers/savefamily/sf-sims"
pipeline {
agent any
tools { nodejs "22.15.0" }
environment {
// NOTE(review): GENERAL_CHANGES is never read in this pipeline — verify
// whether it is consumed elsewhere or can be removed.
GENERAL_CHANGES = "false"
}
stages {
stage('📦 Install dependencies') {
steps {
// Pin the exact yarn version via corepack, then do a lockfile-strict install.
sh 'npm install -g yarn'
sh 'corepack enable'
sh 'corepack prepare yarn@4.12.0 --activate'
sh 'yarn install --immutable'
}
}
stage("🧱 Building") {
steps {
// Clean previous build output so dist/ only contains this build.
sh 'rm -rf dist/'
sh 'yarn run build'
}
}
stage("🏗 Deploying") {
steps {
sshPublisher(
publishers: [
sshPublisherDesc(
verbose: true,
configName: "Save Family",
transfers: [
// Ensure the app directory exists before any transfer.
sshTransfer(
cleanRemote: false,
execCommand: "mkdir -p $APP_REMOTE_PATH"
),
// Link secrets from the vault directory instead of copying them.
sshTransfer(
cleanRemote: false,
execCommand: "ln -sf $BASE_REMOTE_PATH/vault/savefamily/sf-sims/.env $APP_REMOTE_PATH/.env"
),
sshTransfer(
cleanRemote: false,
execCommand: "ln -sf $BASE_REMOTE_PATH/vault/savefamily/sf-sims/sim-consumidor-objenious.env $APP_REMOTE_PATH/sim-consumidor-objenious.env"
),
sshTransfer(
cleanRemote: false,
execCommand: "ln -sf $BASE_REMOTE_PATH/vault/savefamily/sf-sims/sim-objenious-cron.env $APP_REMOTE_PATH/sim-objenious-cron.env"
),
sshTransfer(
cleanRemote: false,
execCommand: "ln -sf $BASE_REMOTE_PATH/vault/savefamily/sf-sims/obj.pem $APP_REMOTE_PATH/obj.pem"
),
// Built workspace output (without node_modules).
sshTransfer(
cleanRemote: false,
remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: "dist/**/*",
excludes: "dist/**/node_modules/**"
),
sshTransfer(
cleanRemote: false,
remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: "deployment/database/**/*",
removePrefix: "deployment",
),
// RabbitMQ config (rabbit/ subdir) and plugin list.
sshTransfer(
cleanRemote: false,
remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: "deployment/develop/rabbit/**/*",
removePrefix: "deployment/develop",
),
sshTransfer(
cleanRemote: false,
remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: "rabbitmq_plugins/**/*"
),
// Dockerfile / compose / entrypoint / rebuild scripts.
sshTransfer(
cleanRemote: false,
remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: "deployment/develop/docker/**/*",
removePrefix: "deployment/develop/docker",
),
sshTransfer(
cleanRemote: false,
remoteDirectory: "$APP_REMOTE_PATH",
sourceFiles: "package.json",
),
// Finally: rebuild and restart the container on the host.
sshTransfer(
cleanRemote: false,
execCommand: "sh $APP_REMOTE_PATH/rebuild.sh"
)
]
)
]
)
}
}
}
post {
failure {
echo '👎🏼 Processing failed'
}
success {
echo '👍🏼 Processing success'
}
}
}

View File

@@ -0,0 +1,90 @@
{
"rabbit_version": "4.2.2",
"rabbitmq_version": "4.2.2",
"product_name": "RabbitMQ",
"product_version": "4.2.2",
"users": [
{
"name": "RABBITMQ_USER_PLACEHOLDER",
"password": "RABBITMQ_PASSWORD_PLACEHOLDER",
"tags": ["administrator"]
}
],
"vhosts": [
{
"name": "sim-vhost"
}
],
"permissions": [
{
"user": "RABBITMQ_USER_PLACEHOLDER",
"vhost": "sim-vhost",
"configure": ".*",
"write": ".*",
"read": ".*"
}
],
"topic_permissions": [],
"parameters": [],
"global_parameters": [
{
"name": "cluster_name",
"value": "rabbit@a8d5c6e08439"
},
{
"name": "internal_cluster_id",
"value": "rabbitmq-cluster-id-gXeBLbsUC2W2tU0Bx_QY_w"
}
],
"policies": [
{
"vhost": "sim-vhost",
"name": "pol.sim.dlx",
"pattern": "sim.*",
"apply-to": "queues",
"definition": {
"dead-letter-exchange": "sim.dlx"
},
"priority": 7
}
],
"exchanges": [
{
"name": "sim.exchange",
"vhost": "sim-vhost",
"type": "topic",
"durable": true,
"auto_delete": false,
"internal": false,
"arguments": {}
},
{
"name": "sim.dlx",
"vhost": "sim-vhost",
"type": "topic",
"durable": true,
"auto_delete": false,
"internal": false,
"arguments": {}
}
],
"queues": [
{
"name": "sim.logs",
"vhost": "sim-vhost",
"durable": true,
"auto_delete": false,
"arguments": {}
}
],
"bindings": [
{
"source": "sim.exchange",
"vhost": "sim-vhost",
"destination": "sim.logs",
"destination_type": "queue",
"routing_key": "sim.#",
"arguments": {}
}
]
}

View File

@@ -0,0 +1,12 @@
#!/bin/bash
set -eu
# Substitute env vars into definitions template before RabbitMQ starts.
# RabbitMQ 4.x skips default user creation when definitions.json is loaded,
# so the user must be defined in the JSON itself.
# NOTE(review): sed performs no escaping and uses '|' as delimiter — a
# RABBITMQ_USER/RABBITMQ_PASSWORD containing '|', '&' or '\' would corrupt
# the generated JSON. Confirm credentials are restricted to safe characters.
sed \
-e "s|RABBITMQ_USER_PLACEHOLDER|${RABBITMQ_USER}|g" \
-e "s|RABBITMQ_PASSWORD_PLACEHOLDER|${RABBITMQ_PASSWORD}|g" \
/etc/rabbitmq/definitions.template.json > /etc/rabbitmq/definitions.json
# Hand off to the stock RabbitMQ entrypoint with the original arguments
# (exec keeps signal handling intact).
exec docker-entrypoint.sh "$@"

View File

@@ -0,0 +1,4 @@
# Import users/vhosts/exchanges/queues from the file generated by
# docker-entrypoint-wrapper.sh at container start.
management.load_definitions = /etc/rabbitmq/definitions.json
# Must match the vhost declared in definitions.json.
default_vhost = sim-vhost
# Newly declared queues default to the quorum type.
default_queue_type = quorum

View File

@@ -11,7 +11,7 @@ post {
}
body:form-urlencoded {
iccid: 8933201125065160406
iccid: 8933201125065160331
offer: SAVEFAMILY1
}

View File

@@ -0,0 +1,16 @@
meta {
name: Activation Email Health
type: http
seq: 8
}
post {
url: https://sf-sim-activation.savefamilygps.net/health
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,26 @@
meta {
name: Activation Email
type: http
seq: 6
}
post {
url: https://sf-sim-activation.savefamilygps.net/send-activation-mail
body: json
auth: inherit
}
headers {
x-apikey-sim-activation: 9e48c4ac-1ab0-4397-b3f3-6c239200dfe6
}
body:json {
{
"iccids":["1234"]
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,3 @@
vars {
baseurl: https://sf-sims.savefamilygps.net
}

View File

@@ -37,7 +37,7 @@ body:form-urlencoded {
}
vars:pre-request {
params.id: 14111
params.id: 14333
}
settings {

View File

@@ -0,0 +1,33 @@
meta {
name: Unit action by id copy
type: http
seq: 19
}
get {
url: https://api-getway.objenious.com/ws/actions/massActions/{{id}}
body: formUrlEncoded
auth: bearer
}
auth:bearer {
token: {{ws-access-token-partenaire}}
}
body:json {
{
"identifier": {
"identifiers": ["8933201124059175967"],
"identifierType": "ICCID"
}
}
}
vars:pre-request {
id: 5192767
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -1,5 +1,6 @@
{
"name": "sim-eventos",
"version": "1.0.0",
"packageManager": "yarn@4.12.0",
"workspaces": [
"packages/*"

View File

@@ -9,8 +9,8 @@ export const env = {
POSTGRES_HOST: process.env.POSTGRES_HOST,
POSTGRES_DATABASE: process.env.POSTGRES_DATABASE,
RABBITMQ_HOST: String(process.env.RABBITMQ_HOST ?? "localhost"),
RABBITMQ_USER: String(process.env.RABBITMQ_USER ?? "guest"),
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD ?? "guest"),
RABBITMQ_USER: String(process.env.RABBITMQ_USER ?? "test"),
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD ?? "test"),
RABBITMQ_EXCHANGE: String(process.env.RABBITMQ_EXCHANGE ?? "/"),
RABBITMQ_PORT: parseInt(process.env.RABBITMQ_PORT ?? "5672"),
RABBITMQ_MODULENAME: process.env.MODULENAME,

View File

@@ -8,7 +8,6 @@ import { ObjeniousOperation, IOperationsRepository as OperationsRepositoryPort }
// - Pasar a un archivo de DTOs
// - Mucha repeticion por funcion, deberia hacer una plantilla
export class SimUseCases {
private readonly httpClient: HttpClient
private readonly operationRepository: OperationsRepositoryPort
@@ -41,7 +40,7 @@ export class SimUseCases {
const operation: ObjeniousOperation = {
operation: "activate",
iccids: activationData.identifier.identifiers,
iccids: String(activationData.identifier.identifiers),
status: "noMassID",
request_id: response.data.requestId
}
@@ -84,7 +83,7 @@ export class SimUseCases {
console.log("Sim preactivada con exito", resp.data)
const operation: ObjeniousOperation = {
operation: "preActivate",
iccids: preActivateData.identifier.identifiers,
iccids: String(preActivateData.identifier.identifiers),
status: "noMassID",
request_id: resp.data.requestId
}

View File

@@ -12,8 +12,8 @@ export const env = {
POSTGRES_HOST: process.env.POSTGRES_HOST,
POSTGRES_DATABASE: process.env.POSTGRES_DATABASE,
RABBITMQ_HOST: String(process.env.RABBITMQ_HOST ?? "localhost"),
RABBITMQ_USER: String(process.env.RABBITMQ_USER ?? "guest"),
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD ?? "guest"),
RABBITMQ_USER: String(process.env.RABBITMQ_USER ?? "test"),
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD ?? "test"),
RABBITMQ_EXCHANGE: String(process.env.RABBITMQ_EXCHANGE ?? "/"),
RABBITMQ_PORT: parseInt(process.env.RABBITMQ_PORT ?? "5672"),
RABBITMQ_MODULENAME: process.env.MODULENAME,

View File

@@ -1,7 +1,8 @@
import { loadEnvFile } from "node:process";
import path from "node:path";
loadEnvFile(path.join(import.meta.dirname, "../../../../.env"))
loadEnvFile(path.join("../../.env")) // Global
export const env = {
ENVIRONMENT: process.env.ENVIORMENT,
@@ -12,8 +13,8 @@ export const env = {
POSTGRES_HOST: process.env.POSTGRES_HOST,
POSTGRES_DATABASE: process.env.POSTGRES_DATABASE,
RABBITMQ_HOST: String(process.env.RABBITMQ_HOST ?? "localhost"),
RABBITMQ_USER: String(process.env.RABBITMQ_USER ?? "guest"),
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD ?? "guest"),
RABBITMQ_USER: String(process.env.RABBITMQ_USER ?? "test"),
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD ?? "test"),
RABBITMQ_EXCHANGE: String(process.env.RABBITMQ_EXCHANGE ?? "/"),
RABBITMQ_PORT: parseInt(process.env.RABBITMQ_PORT ?? "5672"),
RABBITMQ_MODULENAME: process.env.MODULENAME,

View File

@@ -5,4 +5,7 @@ OBJ_CLI_ASSERTION=XOc7FtwXD8hUX2SFVX94XSty8wkOmChkwDNF09O_aIxPubMDdFUdCDCB4zpzSI
OBJ_CLIENT_ID=savefamily_rest_ws
OBJ_KID=xNfbMiyL1ORXGP8lElhcv8nVaG3EJKye4Lc1YoN3I1E
OBJ_BASE_URL=https://api-getway.objenious.com/ws
//OBJ_BASE_URL=https://api-getway.objenious.com/ws/test
# OBJ_BASE_URL=https://api-getway.objenious.com/ws/test
NOTIFICATION_URL="https://sf-sim-activation.savefamilygps.net/send-activation-mail"
SIM_ACTIVATION_API_KEY=9e48c4ac-1ab0-4397-b3f3-6c239200dfe6

View File

@@ -1,5 +1,6 @@
import { loadEnvFile } from "node:process";
import path from "node:path";
import assert from "node:assert";
loadEnvFile(path.join("../../.env")) // Global
loadEnvFile(path.join("./.env")) // base
@@ -12,9 +13,9 @@ export const env = {
POSTGRES_HOST: process.env.POSTGRES_HOST,
POSTGRES_DATABASE: process.env.POSTGRES_DATABASE,
RABBITMQ_HOST: String(process.env.RABBITMQ_HOST ?? "localhost"),
RABBITMQ_USER: String(process.env.RABBITMQ_USER ?? "guest"),
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD ?? "guest"),
RABBITMQ_EXCHANGE: String(process.env.RABBITMQ_EXCHANGE ?? "/"),
RABBITMQ_USER: String(process.env.RABBITMQ_USER),
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD),
RABBITMQ_EXCHANGE: String(process.env.RABBITMQ_EXCHANGE),
RABBITMQ_PORT: parseInt(process.env.RABBITMQ_PORT ?? "5672"),
RABBITMQ_MODULENAME: process.env.MODULENAME,
RABBITMQ_TTL: process.env.RABBITMQ_TTL,
@@ -28,7 +29,22 @@ export const env = {
OBJ_CLI_ASSERTION: String(process.env.OBJ_CLI_ASSERTION),
OBJ_CLIENT_ID: String(process.env.OBJ_CLIENT_ID),
OBJ_KID: String(process.env.OBJ_KID),
OBJ_BASE_URL: String(process.env.OBJ_BASE_URL)
OBJ_BASE_URL: String(process.env.OBJ_BASE_URL),
NOTIFICATION_URL: String(process.env.NOTIFICATION_URL),
SIM_ACTIVATION_API_KEY: String(process.env.SIM_ACTIVATION_API_KEY)
};
// assert las partes criticas
assert(env.RABBITMQ_PASSWORD != undefined)
assert(env.RABBITMQ_USER != undefined)
assert(env.SIM_ACTIVATION_API_KEY != undefined)
assert(env.NOTIFICATION_URL != undefined)
if (env.ENVIRONMENT == "production") {
assert(env.RABBITMQ_PASSWORD != "guest")
assert(env.RABBITMQ_HOST != "localhost")
}
console.log("CRON: ENV", env)

View File

@@ -22,6 +22,7 @@ async function startCron() {
httpClient
)
await objTask.getPendingOperations()
const interval = setInterval(async () => {
console.log("Updating...")
await objTask.getPendingOperations()

View File

@@ -1 +0,0 @@
export const task = async () => console.log("Background " + new Date().toISOString())

View File

@@ -1,3 +1,5 @@
import { env } from "#config/env/index.js";
import axios from "axios";
import { IOperationsRepository, Objenious, ObjeniousOperation, ObjeniousOperationChange, StatusEnum } from "sim-shared/domain/operationsRepository.port.js";
import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js";
@@ -36,14 +38,13 @@ export class CheckObjeniousRequests {
const consultarEstado = pendingOperations.data
.filter(e => e.mass_action_id != undefined)
console.log("validas", operacionesValidas)
console.log("Solicitando mass id para", solicitarMassId)
console.log("[cron] Solicitando mass id para", solicitarMassId.map(e => e.id))
const newMassActions = await this.getMassIdFromRequest(solicitarMassId)
const merged = [...newMassActions || [], ...consultarEstado]
console.log("Solicitando status para", merged)
console.log("[cron] Solicitando status para", merged.map(e => e.id))
const result = await this.getMassActionsStatus(merged)
}
@@ -55,16 +56,9 @@ export class CheckObjeniousRequests {
const PATH = "/actions/massActions/"
const updated = []
const iccids = operationsList
.map(e => e.iccids)
.flat()
const mass_actions = operationsList
.filter(e => e.mass_action_id != undefined)
const iccidSet = new Set<string>(iccids)
console.log("iccidSet", iccidSet)
// 1. Una peticion por cada accion a comprobar
// Las peticiones por iccid u otro filtro tardan ~50s
for (const originalAction of mass_actions) {
@@ -79,18 +73,18 @@ export class CheckObjeniousRequests {
try {
res = await req
} catch (e) {
console.error("Error comprobando el estado de ", originalAction)
console.error("Error: ", e)
console.error("[cron] Error comprobando el estado de ", originalAction)
console.error("[cron] Error: ", e)
return;
}
const { data } = res
console.log("Estado de : ", originalAction.mass_action_id, originalAction.iccids)
console.log("[cron] Estado de : ", originalAction.mass_action_id, originalAction.iccids)
console.log(res.status, data)
if (res.status != 200 || data == undefined) {
console.error("Error buscando los massActions")
console.error("[cron] Error buscando los massActions")
continue;
}
@@ -98,8 +92,7 @@ export class CheckObjeniousRequests {
const { id, status, info } = data
if (status != originalAction.objenious_status) {
console.log(status, "!=", originalAction.objenious_status)
console.log("Actualizando", originalAction, status)
console.log("[cron] Actualizando", originalAction.id, originalAction.iccids, status)
const uorStatus = this.mapStatus(status)
const updateData: ObjeniousOperationChange = {
operation_id: originalAction.id!,
@@ -112,6 +105,27 @@ export class CheckObjeniousRequests {
originalAction.status = uorStatus;
originalAction.objenious_status = status;
originalAction.last_change_date = new Date().toISOString()
originalAction.end_date = originalAction.last_change_date
console.log(" ----> Status", uorStatus)
if (uorStatus /*== "finished"*/) {
console.log(" ****> Status", uorStatus)
const targetIccids = originalAction.iccids
const lineData = await this.getLineData(targetIccids)
console.log("lineData", lineData.content[0])
const msisdn = lineData.content[0].identifier.msisdn
this.notifyFinalization({
...originalAction,
msisdn
})
.then(e => {
console.log("Notificada la activacion de ", originalAction.iccids)
})
.catch(e => {
console.error("Error enviando la activacion de ", originalAction)
console.error(e)
})
}
if (info != undefined) {
updateData.info = info
@@ -144,6 +158,26 @@ export class CheckObjeniousRequests {
return res
}
private async getLineData(iccids: string) {
const PATH = "/lines"
const req = this.httpClient.client.get(PATH, {
params: {
pageSize: 100, // no hace fata
"identifier.identifierType": "ICCID",
"identifier.identifiers": iccids
}
})
try {
const res = await req
return res.data
} catch (e) {
console.error("Error obteniendo datos de la sim")
throw new Error(String(e))
}
}
/**
* Refrescar los requests hasta que conseguir una Id de mass action
* Como no se puede consultar por
@@ -191,11 +225,26 @@ export class CheckObjeniousRequests {
console.log("Error actualizando el estado de ", request)
continue;
}
}
// 3. Se devuelve la lista de los requests con las actualizaciones
return operationsList
}
/**
* Se devuelve la respuesta de una operacion completa de objenious
* al servicio que manda los mails
*/
private async notifyFinalization(operation: ObjeniousOperation & { msisdn: string }) {
const req = axios.post(env.NOTIFICATION_URL, {
...operation,
iccids: [operation.iccids]
}, {
headers: {
"x-apikey-sim-activation": env.SIM_ACTIVATION_API_KEY
}
})
await req
}
}

View File

@@ -13,11 +13,11 @@ export type ObjeniousOperation = {
operation: string;
retry_count?: number;
max_retry?: number;
max_date_retry?: Date | null;
iccids: string[];
max_date_retry?: string | null;
iccids: string; // Deberia ser string[] pero no parseo la lista de iccids
request_id?: string;
mass_action_id?: string;
end_date?: Date | null;
end_date?: string | null;
error?: string | null;
status: StatusEnum;
objenious_status?: string;

View File

@@ -14,8 +14,7 @@ export class OperationsRepository implements IOperationsRepository {
INSERT INTO objenious_operation (operation, iccids, status, max_retry, request_id)
VALUES ($1, $2, $3, $4, $5)
RETURNING *`;
const iccids = JSON.stringify(data.iccids)
const values = [data.operation, iccids, data.status, data.max_retry, data.request_id];
const values = [data.operation, data.iccids, data.status, data.max_retry, data.request_id];
const { rows } = await this.pgClient.query(query, values);
return <Result<string, ObjeniousOperation>>{
data: rows[0]