@nestjs/common#OnModuleDestroy TypeScript Examples

The following examples show how to use the OnModuleDestroy lifecycle hook from @nestjs/common. You can go to the original project or source file by following the links above each example.
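Before diving into the examples, here is a minimal sketch of the hook itself: a provider implements the OnModuleDestroy interface, and Nest calls its onModuleDestroy() method during application shutdown, awaiting the result if it returns a Promise. The ConnectionLike type and openConnection helper below are illustrative placeholders, not a real API.

import { Injectable, OnModuleDestroy } from '@nestjs/common';

// `ConnectionLike` and `openConnection` are illustrative placeholders,
// standing in for any resource that needs explicit cleanup.
interface ConnectionLike {
  close(): Promise<void>;
}
declare function openConnection(): ConnectionLike;

@Injectable()
export class ExampleService implements OnModuleDestroy {
  private readonly connection = openConnection();

  // Nest calls this when the host module is torn down; if it returns
  // a Promise, Nest awaits it before shutdown continues.
  async onModuleDestroy(): Promise<void> {
    await this.connection.close();
  }
}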
Example #1
Source File: datadog.service.ts    From ironfish-api with Mozilla Public License 2.0
@Injectable()
export class DatadogService implements OnModuleDestroy {
  private datadogClient: StatsD;

  constructor(private readonly config: ApiConfigService) {
    this.datadogClient = new StatsD({
      bufferFlushInterval: 1000,
      globalTags: {
        env: this.config.get<string>('NODE_ENV'),
      },
      host: this.config.get<string>('DATADOG_URL'),
      port: DEFAULT_PORT,
      prefix: 'api.',
    });
  }

  timing(stat: string, value: number, tags?: Tags): void {
    this.datadogClient.timing(stat, value, tags);
  }

  increment(stat: string, value: number, tags?: Tags): void {
    this.datadogClient.increment(stat, value, tags);
  }

  onModuleDestroy(): void {
    this.datadogClient.close();
  }
}
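Note that cleanup hooks like the close() call above only run when the application shuts down gracefully: either you call app.close() yourself, or you enable shutdown hooks so that signals such as SIGTERM trigger the same teardown. A minimal bootstrap sketch (the AppModule import path is an assumption):

import { NestFactory } from '@nestjs/core';
import { AppModule } from './app.module'; // assumed application module

async function bootstrap(): Promise<void> {
  const app = await NestFactory.create(AppModule);

  // Without this, SIGTERM/SIGINT will not trigger onModuleDestroy hooks.
  app.enableShutdownHooks();

  await app.listen(3000);

  // Alternatively, closing the app programmatically runs the same hooks:
  // await app.close();
}

void bootstrap();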
Example #2
Source File: influxdb.service.ts    From ironfish-api with Mozilla Public License 2.0
@Injectable()
export class InfluxDbService implements OnModuleDestroy {
  private writeClient: WriteApi;

  constructor(private readonly config: ApiConfigService) {
    const client = new InfluxDB({
      token: config.get<string>('INFLUXDB_API_TOKEN'),
      url: config.get<string>('INFLUXDB_URL'),
    });
    this.writeClient = client.getWriteApi(
      INFLUXDB_ORG,
      config.get<string>('INFLUXDB_BUCKET'),
      'ms',
    );
  }

  writePoints(options: CreatePointOptions[]): void {
    const points = [];

    for (const option of options) {
      const { fields, measurement, tags, timestamp } = option;
      const point = new Point(measurement).timestamp(timestamp);

      for (const field of fields) {
        const { name } = field;
        if (field.type === 'boolean') {
          point.booleanField(name, field.value);
        } else if (field.type === 'float') {
          point.floatField(name, field.value);
        } else if (field.type === 'integer') {
          point.intField(name, field.value);
        } else {
          point.stringField(name, field.value);
        }
      }

      for (const tag of tags) {
        point.tag(tag.name, tag.value);
      }

      points.push(point);
    }

    this.writeClient.writePoints(points);
  }

  async onModuleDestroy(): Promise<void> {
    await this.writeClient.close();
  }
}
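For reference, a call to writePoints would pass options shaped roughly as follows. This shape is inferred from the loops in the method above, not taken from the project's actual CreatePointOptions definition:

// Hypothetical usage; the option shape is inferred from Example #2's loops.
influxDbService.writePoints([
  {
    measurement: 'node_count',
    timestamp: new Date(),
    fields: [
      { name: 'total', type: 'integer', value: 42 },
      { name: 'synced', type: 'boolean', value: true },
    ],
    tags: [{ name: 'network', value: 'mainnet' }],
  },
]);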
Example #3
Source File: throttler-storage-redis.service.ts    From nestjs-throttler-storage-redis with MIT License
@Injectable()
export class ThrottlerStorageRedisService implements ThrottlerStorageRedis, OnModuleDestroy {
  redis: Redis;
  disconnectRequired?: boolean;
  scanCount: number;

  constructor(redis?: Redis, scanCount?: number);
  constructor(options?: RedisOptions, scanCount?: number);
  constructor(url?: string, scanCount?: number);
  constructor(redisOrOptions?: Redis | RedisOptions | string, scanCount?: number) {
    this.scanCount = typeof scanCount === 'undefined' ? 1000 : scanCount;

    if (redisOrOptions instanceof Redis) {
      this.redis = redisOrOptions;
    } else if (typeof redisOrOptions === 'string') {
      this.redis = new Redis(redisOrOptions as string);
      this.disconnectRequired = true;
    } else {
      this.redis = new Redis(redisOrOptions);
      this.disconnectRequired = true;
    }
  }

  async getRecord(key: string): Promise<number[]> {
    const ttls = (
      await this.redis.scan(
        0,
        'MATCH',
        `${this.redis?.options?.keyPrefix}${key}:*`,
        'COUNT',
        this.scanCount,
      )
    ).pop();
    return (ttls as string[]).map((k) => parseInt(k.split(':').pop())).sort();
  }

  async addRecord(key: string, ttl: number): Promise<void> {
    await this.redis.set(`${key}:${Date.now() + ttl * 1000}`, ttl, 'EX', ttl);
  }

  onModuleDestroy() {
    if (this.disconnectRequired) {
      this.redis?.disconnect(false);
    }
  }
}
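For context, this storage class is meant to be handed to the rate limiter at module registration, along the lines of the sketch below. The option shape matches older @nestjs/throttler releases, and the ttl/limit values and Redis URL are arbitrary:

// Sketch of wiring the Redis storage into the throttler (values are arbitrary).
import { Module } from '@nestjs/common';
import { ThrottlerModule } from '@nestjs/throttler';
import { ThrottlerStorageRedisService } from 'nestjs-throttler-storage-redis';

@Module({
  imports: [
    ThrottlerModule.forRoot({
      ttl: 60,
      limit: 10,
      // The service connects itself here and, as shown above, disconnects
      // in onModuleDestroy only because it created the connection.
      storage: new ThrottlerStorageRedisService('redis://localhost:6379'),
    }),
  ],
})
export class AppModule {}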
Example #4
Source File: async-context.ts    From nest-xray with MIT License
export class AsyncContext implements OnModuleInit, OnModuleDestroy {
  public static getInstance(): AsyncContext {
    if (!this.instance) {
      this.initialize();
    }
    return this.instance;
  }

  private static instance: AsyncContext;

  private static initialize() {
    const asyncHooksStorage = new AsyncHooksStorage();
    const asyncHook = AsyncHooksHelper.createHooks(asyncHooksStorage);
    const storage = asyncHooksStorage.getInternalStorage();

    this.instance = new AsyncContext(storage, asyncHook);
  }

  private constructor(
    private readonly internalStorage: Map<number, any>,
    private readonly asyncHookRef: asyncHooks.AsyncHook
  ) {}

  public onModuleInit() {
    this.asyncHookRef.enable();
  }

  public onModuleDestroy() {
    this.asyncHookRef.disable();
  }

  public set<TKey = any, TValue = any>(key: TKey, value: TValue) {
    const store = this.getAsyncStorage();
    store.set(key, value);
  }

  public get<TKey = any, TReturnValue = any>(key: TKey): TReturnValue {
    const store = this.getAsyncStorage();
    return store.get(key) as TReturnValue;
  }

  public run(fn: Function) {
    const eid = asyncHooks.executionAsyncId();
    this.internalStorage.set(eid, new Map());
    fn();
  }

  private getAsyncStorage(): Map<unknown, unknown> {
    const eid = asyncHooks.executionAsyncId();
    const state = this.internalStorage.get(eid);
    if (!state) {
      throw new UnknownAsyncContextException(eid);
    }
    return state;
  }
}
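The AsyncContext above is consumed through its singleton. A hypothetical usage, based only on the methods shown in the example:

// Hypothetical usage based only on the methods shown above.
const ctx = AsyncContext.getInstance();

ctx.run(() => {
  ctx.set('traceId', 'abc-123');

  // Anywhere further down the same async execution chain:
  const traceId = ctx.get<string, string>('traceId');
  console.log(traceId); // 'abc-123'
});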
Example #5
Source File: database.service.ts    From nestjs-angular-starter with MIT License
/**
 * Responsible for connecting and closing the mongo database connection. It will retry
 * a few times if the connection cannot be established on the first attempt.
 */
@Injectable()
export class DatabaseService
  implements IDatabaseService, OnModuleInit, OnModuleDestroy {
  connectionManager: DatabaseConnectionManager =
    DatabaseConnectionManager.instance;
  connection: mongoose.Connection;

  constructor(
    @Inject('DATABASE_MODULE_CONFIG')
    private readonly config: DatabaseModuleConfig,
  ) {}

  /**
   * This forces the database to connect when the database module is imported.
   */
  async onModuleInit(): Promise<void> {
    await this.connectWithRetry(this.config.retryCount || 8);
  }

  protected async connectWithRetry(retryCount: number): Promise<void> {
    for (let i = 0; i < retryCount; i++) {
      try {
        await this.connect();
        return;
      } catch (error) {
        Logger.log(`Retrying to connect database in 5 seconds...`);
        await sleepAsync(5000);
      }
    }
  }

  async connect(): Promise<void> {
    this.connection = await this.connectionManager.connectDatabase(
      this.config.uri,
    );
  }

  async close(): Promise<void> {
    const db = mongoose.connection;
    if (db) await db.close();
  }

  /**
   * Closes the database when the app is being closed.
   */
  async onModuleDestroy(): Promise<void> {
    await this.close();
  }
}
Example #6
Source File: database.test.service.ts    From nestjs-angular-starter with MIT License
/**
 * The database test service mocks the original database service by creating an in-memory
 * mongo server instead of using the real database.
 */
@Injectable()
export class DatabaseTestService
  implements IDatabaseService, OnModuleInit, OnModuleDestroy {
  // A singleton approach to get a handle to the database in tests and easily close it
  static instance: DatabaseTestService;

  // Create an instance of the in-memory mongo database
  protected mongoServer = new MongoMemoryServer();

  constructor() {
    // Set the instance
    DatabaseTestService.instance = this;
  }

  async onModuleInit(): Promise<void> {
    await this.connect();
  }

  /**
   * Sets up the in-memory mongo database, connects to the database and starts the server.
   */
  async connect(): Promise<void> {
    const uri = await this.mongoServer.getUri();
    await mongoose.connect(uri, {
      useNewUrlParser: true,
      useUnifiedTopology: true,
      useCreateIndex: true,
    });
    await TestDBSetup.instance.setup();
  }

  /**
   * Closes the database connection and its related mongo server emulator.
   */
  async close(): Promise<void> {
    const db = mongoose.connection;
    if (db) await db.close();
    await this.mongoServer.stop();
  }

  async onModuleDestroy(): Promise<void> {
    await this.close();
  }
}
Example #7
Source File: app.module.ts    From knests with MIT License
@Module({
  imports: [
    ConfigModule.forRoot({
      ignoreEnvFile: false,
    }),
    KnexModule.forRoot({
      config: {
        client: 'pg',
        debug: true,
        connection: process.env.DATABASE_URL,
        pool: { min: 0, max: 7, idleTimeoutMillis: 300_000 },
      },
    }),
    // schema first dev
    GraphQLModule.forRoot({
      typePaths: ['./**/*.graphql'],
      // installSubscriptionHandlers: true,
      installSubscriptionHandlers: false,
      debug: !isProduction,
      playground: !isProduction,
      context: ({ req }) => ({ req }),
      definitions: {
        path: join(process.cwd(), 'src/graphql.ts'),
        outputAs: 'class',
      },
    }),
    // this would be for "code first" development
    // GraphQLModule.forRoot({
    //   context: ({ req }) => ({ req }),
    //   autoSchemaFile: 'schema.gql',
    //   debug: !isProduction,
    //   playground: !isProduction,
    // }),
    AuthModule,
    UsersModule,
  ],
  controllers: [AppController],
  providers: [AppService],
})

export class AppModule implements OnModuleDestroy {

  @InjectKnex() private readonly knex: Knex;

  async onModuleDestroy() {
    await this.knex.destroy();
  }
}
Example #8
Source File: event-store.service.ts    From nestjs-geteventstore with MIT License
@Injectable()
export class EventStoreService
  implements OnModuleInit, OnModuleDestroy, IEventStoreService
{
  private logger: Logger = new Logger(this.constructor.name);
  private persistentSubscriptions: PersistentSubscription[];

  private isOnError = false;
  private isTryingToConnect = true;
  private isTryingToWriteEvents = false;
  private isTryingToWriteMetadatas = false;

  private connectionRetryFallback: Timeout;
  constructor(
    @Inject(EVENT_STORE_CONNECTOR)
    private readonly eventStore: Client,
    @Inject(EVENT_STORE_SUBSYSTEMS)
    private readonly subsystems: IEventStoreSubsystems,
    @Inject(EVENTS_AND_METADATAS_STACKER)
    private readonly eventsStacker: IEventsAndMetadatasStacker,
    private readonly eventStoreHealthIndicator: EventStoreHealthIndicator,
    @Optional() private readonly eventBus?: ReadEventBus,
  ) {}

  public async onModuleInit(): Promise<void> {
    return await this.connect();
  }

  public onModuleDestroy(): void {
    clearTimeout(this.connectionRetryFallback);
  }

  private async connect(): Promise<void> {
    try {
      if (this.subsystems.subscriptions)
        this.persistentSubscriptions =
          await this.subscribeToPersistentSubscriptions(
            this.subsystems.subscriptions.persistent,
          );
      if (this.subsystems.projections)
        await this.upsertProjections(this.subsystems.projections).catch((e) =>
          this.logger.error(e),
        );

      this.isOnError = false;
      this.isTryingToConnect = false;
      this.logger.log(`EventStore connected`);
      this.eventStoreHealthIndicator.updateStatus({
        connection: 'up',
        subscriptions: 'up',
      });
      await this.tryToWriteStackedEventBatches();
      await this.tryToWriteStackedMetadatas();
    } catch (e) {
      this.isTryingToConnect = true;
      this.eventStoreHealthIndicator.updateStatus({
        connection: 'down',
        subscriptions: 'down',
      });
      await this.retryToConnect();
    }
  }

  private async retryToConnect(): Promise<void> {
    this.logger.log(`EventStore connection failed: trying to reconnect`);
    this.connectionRetryFallback = setTimeout(
      async () => await this.connect(),
      RECONNECTION_TRY_DELAY_IN_MS,
    );
  }

  public async createProjection(
    query: string,
    type: 'oneTime' | 'continuous' | 'transient',
    projectionName?: string,
    options?:
      | CreateContinuousProjectionOptions
      | CreateTransientProjectionOptions
      | CreateOneTimeProjectionOptions,
  ): Promise<void> {
    switch (type) {
      case 'continuous':
        await this.eventStore.createContinuousProjection(
          projectionName,
          query,
          options ?? {},
        );
        break;
      case 'transient':
        await this.eventStore.createTransientProjection(
          projectionName,
          query,
          options ?? {},
        );
        break;
      case 'oneTime': {
        await this.eventStore.createOneTimeProjection(query, options ?? {});
        break;
      }
      default:
        return;
    }
  }

  public getProjectionState<T>(
    streamName: string,
    options?: GetProjectionStateOptions,
  ): Promise<T> {
    return this.eventStore.getProjectionState<T>(streamName, options);
  }

  public async updateProjection(
    projection: EventStoreProjection,
    content: string,
  ): Promise<void> {
    await this.eventStore.updateProjection(projection.name, content, {
      trackEmittedStreams: projection.trackEmittedStreams,
    });
  }

  private extractProjectionContent(projection: EventStoreProjection): string {
    let content;
    if (projection.content) {
      this.logger.log(`"${projection.name}" projection in content`);
      content = projection.content;
    } else if (projection.file) {
      this.logger.log(`"${projection.name}" projection in file`);
      content = readFileSync(projection.file, 'utf8');
    }
    return content;
  }

  public async upsertProjections(
    projections: EventStoreProjection[],
  ): Promise<void> {
    for (const projection of projections) {
      this.logger.log(`Upserting projection "${projection.name}"...`);

      const content = this.extractProjectionContent(projection);
      await this.upsertProjection(content, projection);

      this.logger.log(`Projection "${projection.name}" upserted!`);
    }
  }

  private async upsertProjection(
    content: string,
    projection: EventStoreProjection,
  ): Promise<void> {
    await this.createProjection(
      content ?? projection.content,
      projection.mode,
      projection.name,
      {
        trackEmittedStreams: projection.trackEmittedStreams,
      },
    ).catch(async (e) => {
      if (EventStoreService.isNotAProjectionAlreadyExistsError(e)) {
        throw Error(e);
      }
      await this.updateProjection(projection, content);
    });
  }

  public async createPersistentSubscription(
    streamName: string,
    groupName: string,
    settings: Partial<PersistentSubscriptionSettings>,
    options?: BaseOptions,
  ): Promise<void> {
    try {
      await this.eventStore.createPersistentSubscription(
        streamName,
        groupName,
        {
          ...persistentSubscriptionSettingsFromDefaults(),
          ...settings,
        },
        options,
      );
    } catch (e) {
      this.logger.error(e);
    }
  }

  public async updatePersistentSubscription(
    streamName: string,
    group: string,
    options: Partial<PersistentSubscriptionSettings>,
    credentials?: Credentials,
  ): Promise<void> {
    try {
      await this.eventStore.updatePersistentSubscription(
        streamName,
        group,
        {
          ...persistentSubscriptionSettingsFromDefaults(),
          ...options,
        } as PersistentSubscriptionSettings,
        { credentials },
      );
    } catch (e) {
      this.logger.error(e);
    }
  }

  public async deletePersistentSubscription(
    streamName: string,
    groupName: string,
    options?: DeletePersistentSubscriptionOptions,
  ): Promise<void> {
    try {
      await this.eventStore.deletePersistentSubscription(
        streamName,
        groupName,
        options,
      );
    } catch (e) {
      this.logger.error(`Error while deleting persistent subscription`);
      this.subsystems.onConnectionFail(e);
    }
  }

  public async subscribeToPersistentSubscriptions(
    subscriptions: IPersistentSubscriptionConfig[] = [],
  ): Promise<PersistentSubscription[]> {
    await this.upsertPersistentSubscriptions(subscriptions);

    return Promise.all(
      subscriptions.map(
        (config: IPersistentSubscriptionConfig): PersistentSubscription => {
          this.logger.log(
            `Connecting to persistent subscription "${config.group}" on stream "${config.stream}"...`,
          );
          const onEvent = (subscription, payload) => {
            return this.subsystems.onEvent
              ? this.subsystems.onEvent(subscription, payload)
              : this.onEvent(subscription, payload);
          };
          const persistentSubscription: PersistentSubscription =
            this.eventStore.connectToPersistentSubscription(
              config.stream,
              config.group,
            );
          if (!isNil(onEvent)) {
            persistentSubscription.on('data', (subscription, payload) => {
              onEvent(subscription, payload);
            });
          }
          if (!isNil(config.onSubscriptionStart)) {
            persistentSubscription.on(
              'confirmation',
              config.onSubscriptionStart,
            );
          }
          if (!isNil(config.onSubscriptionDropped)) {
            persistentSubscription.on('close', config.onSubscriptionDropped);
          }

          persistentSubscription.on('error', config.onError);

          persistentSubscription.on('error', async (): Promise<void> => {
            this.eventStoreHealthIndicator.updateStatus({
              subscriptions: 'down',
            });
            if (!this.isTryingToConnect) await this.connect();
          });
          this.logger.log(
            `Connected to persistent subscription "${config.group}" on stream "${config.stream}" !`,
          );
          return persistentSubscription;
        },
      ),
    );
  }

  private async upsertPersistentSubscriptions(
    subscriptions: IPersistentSubscriptionConfig[],
  ): Promise<void> {
    for (const subscription of subscriptions) {
      await this.upsertPersistentSubscription(subscription);
    }
  }

  private async upsertPersistentSubscription(
    subscription: IPersistentSubscriptionConfig,
  ): Promise<void> {
    try {
      await this.eventStore.createPersistentSubscription(
        subscription.stream,
        subscription.group,
        {
          ...persistentSubscriptionSettingsFromDefaults(),
          ...subscription.settingsForCreation?.subscriptionSettings,
        },
        subscription.settingsForCreation?.baseOptions,
      );
      this.logger.log(
        `Persistent subscription "${subscription.group}" on stream ${subscription.stream} created.`,
      );
    } catch (e) {
      if (EventStoreService.isNotAlreadyExistsError(e)) {
        this.logger.error('Error while creating persistent subscription: ', e);
        throw new Error(e);
      }
      await this.eventStore.updatePersistentSubscription(
        subscription.stream,
        subscription.group,
        {
          ...persistentSubscriptionSettingsFromDefaults(),
          ...subscription.settingsForCreation.subscriptionSettings,
        },
        subscription.settingsForCreation.baseOptions,
      );
    }
  }

  private static isNotAlreadyExistsError(e) {
    return e.code !== PERSISTENT_SUBSCRIPTION_ALREADY_EXIST_ERROR_CODE;
  }

  private static isNotAProjectionAlreadyExistsError(e): boolean {
    return e.code !== PROJECTION_ALREADY_EXIST_ERROR_CODE;
  }

  public getPersistentSubscriptions(): PersistentSubscription[] {
    return this.persistentSubscriptions;
  }

  public readMetadata(stream: string): Promise<GetStreamMetadataResult> {
    try {
      return this.eventStore.getStreamMetadata(stream);
    } catch (e) {
      this.logger.error(`Error while reading metadata of stream ${stream}`);
      this.subsystems.onConnectionFail(e);
    }
  }

  public async writeMetadata(
    streamName: string,
    metadata: StreamMetadata,
    options?: SetStreamMetadataOptions,
  ): Promise<AppendResult> {
    this.eventsStacker.putMetadatasInWaitingLine({
      streamName,
      metadata,
      options,
    });

    return this.isTryingToWriteMetadatas
      ? null
      : await this.tryToWriteStackedMetadatas();
  }

  private async tryToWriteStackedMetadatas(): Promise<null | AppendResult> {
    try {
      this.isTryingToWriteMetadatas = true;
      let lastValidAppendResult: AppendResult = null;
      while (this.eventsStacker.getMetadatasWaitingLineLength() > 0) {
        const metadata: MetadatasContextDatas =
          this.eventsStacker.getFirstOutFromMetadatasWaitingLine();
        lastValidAppendResult = await this.eventStore.setStreamMetadata(
          metadata.streamName,
          metadata.metadata,
          metadata.options,
        );
        this.eventsStacker.shiftMetadatasFromWaitingLine();
      }
      this.isTryingToWriteMetadatas = false;
      return lastValidAppendResult;
    } catch (e) {
      this.eventStoreHealthIndicator.updateStatus({ connection: 'down' });
      this.subsystems.onConnectionFail(e);
      return null;
    }
  }

  public async readFromStream(
    stream: string,
    options?: ReadStreamOptions,
    readableOptions?: ReadableOptions,
  ): Promise<StreamingRead<ResolvedEvent>> {
    try {
      return this.eventStore.readStream(stream, options, readableOptions);
    } catch (e) {
      this.logger.error(`Error while reading a stream`);
      this.subsystems.onConnectionFail(e);
    }
  }

  public async writeEvents(
    stream: string,
    events: EventData[],
    expectedVersion: AppendToStreamOptions = {
      expectedRevision: constants.ANY,
    },
  ): Promise<AppendResult> {
    this.eventsStacker.putEventsInWaitingLine({
      events,
      stream,
      expectedVersion,
    });
    return this.isTryingToWriteEvents
      ? null
      : await this.tryToWriteStackedEventBatches();
  }

  private async tryToWriteStackedEventBatches(): Promise<AppendResult> {
    try {
      let lastValidAppendResult: AppendResult = null;
      this.isTryingToWriteEvents = true;

      while (this.eventsStacker.getEventBatchesWaitingLineLength() > 0) {
        lastValidAppendResult = await this.tryToWriteEventsFromBatch();
      }

      this.isTryingToWriteEvents = false;
      return lastValidAppendResult;
    } catch (e) {
      this.eventStoreHealthIndicator.updateStatus({ connection: 'down' });
      this.subsystems.onConnectionFail(e);
      return null;
    }
  }

  private async tryToWriteEventsFromBatch(): Promise<null | AppendResult> {
    const batch: EventBatch =
      this.eventsStacker.getFirstOutFromEventsBatchesWaitingLine();
    const appendResult: AppendResult = await this.eventStore.appendToStream(
      batch.stream,
      batch.events,
      batch.expectedVersion,
    );
    this.eventsStacker.shiftEventsBatchFromWaitingLine();
    return appendResult;
  }

  private async onEvent(
    subscription: IPersistentSubscriptionConfig,
    payload,
  ): Promise<unknown> {
    return EventHandlerHelper.onEvent(
      this.logger,
      subscription,
      payload,
      this.eventBus,
    );
  }
}
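Example #8's onModuleDestroy does nothing but clear the reconnection timer; a leaked timer would otherwise keep scheduling reconnect attempts after shutdown. The same pattern, distilled into a self-contained sketch (all names are illustrative):

import { Injectable, OnModuleDestroy } from '@nestjs/common';

@Injectable()
export class PollingService implements OnModuleDestroy {
  // Illustrative periodic work; the handle must be kept for cleanup.
  private readonly pollHandle = setInterval(() => this.poll(), 10_000);

  private poll(): void {
    // ... fetch and process data ...
  }

  onModuleDestroy(): void {
    // A live interval would keep the Node.js event loop running after app.close().
    clearInterval(this.pollHandle);
  }
}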
Example #9
Source File: kafka.service.ts    From nestjs-kafka with The Unlicense
@Injectable()
export class KafkaService implements OnModuleInit, OnModuleDestroy {

  private kafka: Kafka;
  private producer: Producer;
  private consumer: Consumer;
  private admin: Admin;
  private deserializer: Deserializer;
  private serializer: Serializer;
  private autoConnect: boolean;
  private options: KafkaModuleOption['options'];

  protected topicOffsets: Map<string, (SeekEntry & { high: string; low: string })[]> = new Map();
  
  protected logger = new Logger(KafkaService.name);

  constructor(
    options: KafkaModuleOption['options']
  ) {
    const { 
      client,
      consumer: consumerConfig,
      producer: producerConfig,
    } = options;

    this.kafka = new Kafka({
      ...client,
      logCreator: KafkaLogger.bind(null, this.logger)
    });

    const { groupId } = consumerConfig;
    const consumerOptions = Object.assign(
      {
        groupId: this.getGroupIdSuffix(groupId),
      },
      consumerConfig
    );
    
    this.autoConnect = options.autoConnect ?? true;
    this.consumer = this.kafka.consumer(consumerOptions);
    this.producer = this.kafka.producer(producerConfig);
    this.admin = this.kafka.admin();

    this.initializeDeserializer(options);
    this.initializeSerializer(options);
    this.options = options;
  }

  async onModuleInit(): Promise<void> {
    await this.connect();
    await this.getTopicOffsets();
    SUBSCRIBER_MAP.forEach((functionRef, topic) => {
      this.subscribe(topic);
    });
    this.bindAllTopicToConsumer();
  }

  async onModuleDestroy(): Promise<void> {
    await this.disconnect();
  }

  /**
   * Connect the kafka service.
   */
  async connect(): Promise<void> {
    if (!this.autoConnect) {
      return;
    }

    await this.producer.connect();
    await this.consumer.connect();
    await this.admin.connect();
  }

  /**
   * Disconnects the kafka service.
   */
  async disconnect(): Promise<void> {
    await this.producer.disconnect();
    await this.consumer.disconnect();
    await this.admin.disconnect();
  }

  /**
   * Gets the high, low and partitions of a topic.
   */
  private async getTopicOffsets(): Promise<void> {
    const topics = SUBSCRIBER_MAP.keys();

    for await (const topic of topics) {
      try {
        const topicOffsets = await this.admin.fetchTopicOffsets(topic);
        this.topicOffsets.set(topic, topicOffsets);
      } catch (e) {
        this.logger.error('Error fetching topic offset: ', topic);
      }
    }
  }

  /**
   * Subscribes to the topics.
   * 
   * @param topic 
   */
  private async subscribe(topic: string): Promise<void> {
    await this.consumer.subscribe({
      topic,
      fromBeginning: this.options.consumeFromBeginning || false
    });
  }
  
  /**
   * Send/produce a message to a topic.
   * 
   * @param message 
   */
  async send(message: KafkaMessageSend): Promise<RecordMetadata[]> {
    if (!this.producer) {
      this.logger.error('There is no producer, unable to send message.');
      return;
    }

    const serializedPacket = await this.serializer.serialize(message);

    // @todo - rather than have a producerRecord, 
    // most of this can be done when we create the controller.
    return await this.producer.send(serializedPacket);
  }

  /**
   * Gets the groupId suffix for the consumer.
   * 
   * @param groupId 
   */
  public getGroupIdSuffix(groupId: string): string {
    return groupId + '-client';
  }

  /**
   * Calls the method you are subscribed to.
   * 
   * @param topic
   *  The topic to subscribe to.
   * @param instance 
   *  The class instance.
   */
  subscribeToResponseOf<T>(topic: string, instance: T): void {
    SUBSCRIBER_OBJECT_MAP.set(topic, instance);
  }

  /**
   * Returns a new producer transaction in order to produce messages and commit offsets together
   */
  async transaction(): Promise<KafkaTransaction> {
    const producer = this.producer;
    if (!producer) {
      const msg = 'There is no producer, unable to start transactions.';
      this.logger.error(msg);
      throw new Error(msg);
    }

    const tx = await producer.transaction();
    const retval: KafkaTransaction = {
      abort(): Promise<void> {
        return tx.abort();
      },
      commit(): Promise<void> {
        return tx.commit();
      },
      isActive(): boolean {
        return tx.isActive();
      },
      send: async (message: KafkaMessageSend): Promise<RecordMetadata[]> => {
        // Arrow function so `this` stays bound to the KafkaService; a shorthand
        // method here would rebind `this` to the transaction object, which has
        // no serializer.
        const serializedPacket = await this.serializer.serialize(message);
        return await tx.send(serializedPacket);
      },
      sendOffsets(offsets: Offsets & { consumerGroupId: string }): Promise<void> {
        return tx.sendOffsets(offsets);
      },
    };
    return retval;
  }

  /**
   * Commit consumer offsets manually.
   * Please note that in most cases you will want to use the given __autoCommitThreshold__
   * or use a transaction to atomically set offsets and outgoing messages.
   *
   * @param topicPartitions
   */
  async commitOffsets(topicPartitions: Array<TopicPartitionOffsetAndMetadata>): Promise<void> {
    return this.consumer.commitOffsets(topicPartitions);
  }

  /**
   * Sets up the serializer to encode outgoing messages.
   * 
   * @param options 
   */
  protected initializeSerializer(options: KafkaModuleOption['options']): void {
    this.serializer = (options && options.serializer) || new KafkaRequestSerializer();
  }

  /**
   * Sets up the deserializer to decode incoming messages.
   * 
   * @param options 
   */
  protected initializeDeserializer(options: KafkaModuleOption['options']): void {
    this.deserializer = (options && options.deserializer) || new KafkaResponseDeserializer();
  }

  /**
   * Runs the consumer and calls the consumers when a message arrives.
   */
  private bindAllTopicToConsumer(): void {
    const runConfig = (this.options.consumerRunConfig) ? this.options.consumerRunConfig : {};
    this.consumer.run({
      ...runConfig,
      eachMessage: async ({ topic, partition, message }) => {
        const objectRef = SUBSCRIBER_OBJECT_MAP.get(topic);
        const callback = SUBSCRIBER_MAP.get(topic);

        try {
          const { timestamp, response, offset, key, headers } = await this.deserializer.deserialize(message, { topic });
          await callback.apply(objectRef, [response, key, offset, timestamp, partition, headers]);
        } catch(e) {
          this.logger.error(`Error for message ${topic}: ${e}`);

          // Log and throw to ensure we don't keep processing the messages when there is an error.
          throw e;
        }
      },
    });

    if (this.options.seek !== undefined) {
      this.seekTopics();
    }
  }

  /**
   * Seeks to a specific offset defined in the config
   * or to the lowest value and across all partitions.
   */
  private seekTopics(): void {
    Object.keys(this.options.seek).forEach((topic) => {
      const topicOffsets = this.topicOffsets.get(topic);
      const seekPoint = this.options.seek[topic];

      topicOffsets.forEach((topicOffset) => {
        let seek = String(seekPoint);

        // Seek by timestamp
        if (typeof seekPoint === 'object') {
          const time = seekPoint as Date;
          seek = time.getTime().toString();
        }

        // Seek to the earliest timestamp.
        if (seekPoint === 'earliest') {
          seek = topicOffset.low;
        }

        this.consumer.seek({
          topic,
          partition: topicOffset.partition,
          offset: seek
        });
      })
    })
  }
}
Example #10
Source File: sse.service.ts    From office-hours with GNU General Public License v3.0
/**
 * T is metadata associated with each Client
 *
 * Low level abstraction for sending SSE to "rooms" of clients.
 * Probably don't use this directly; instead, wrap it in a service specific to that event source.
 *
 * This handles running multiple backend instances by assigning a unique client id to each connection.
 * When one instance wants to send to a client, it publishes to a Redis channel for that client.
 * All instances listen to Redis, and the one managing that client sends the message.
 *
 * Rooms with client metadata are also maintained in the Redis key/value store.
 */
@Injectable()
export class SSEService<T> implements OnModuleDestroy {
  // Clients connected to this instance of the backend
  private directConnections: Record<string, Connection> = {};

  constructor(private readonly redisService: RedisService) {
    const redisSub = this.redisService.getClient('sub');

    if (!redisSub) {
      Sentry.captureException(ERROR_MESSAGES.sseService.getSubClient);
      throw new Error(ERROR_MESSAGES.sseService.getSubClient);
    }

    // If channel is managed by this instance, send the message to the Response object.
    redisSub.on('message', (channel, message) => {
      const id = /sse::client-(\d+)/.exec(channel);
      if (id && id[1] in this.directConnections) {
        this.directConnections[id[1]].res.write(`data: ${message}\n\n`);
      }
    });
  }

  async onModuleDestroy(): Promise<void> {
    // Cleanup all direct connections by removing them from the rooms in redis.
    await each(Object.values(this.directConnections), async (conn) => {
      await conn.cleanup().catch((err) => {
        console.error(ERROR_MESSAGES.sseService.cleanupConnection);
        console.error(err);
        Sentry.captureException(err);
      });
    }).catch((err) => {
      console.error(ERROR_MESSAGES.sseService.moduleDestroy);
      console.error(err);
      Sentry.captureException(err);
    });
  }

  /**
   * Get redis channel name from client id
   */
  private idToChannel(clientId: number) {
    return `sse::client-${clientId}`;
  }

  /** Add a client to a room */
  async subscribeClient(
    room: string,
    res: Response,
    metadata: T,
  ): Promise<void> {
    const redisSub = this.redisService.getClient('sub');
    const redis = this.redisService.getClient('db');

    if (!redisSub) {
      Sentry.captureException(ERROR_MESSAGES.sseService.getSubClient);
      throw new Error(ERROR_MESSAGES.sseService.getSubClient);
    }
    if (!redis) {
      Sentry.captureException(ERROR_MESSAGES.sseService.getDBClient);
      throw new Error(ERROR_MESSAGES.sseService.getDBClient);
    }

    // Keep track of responses so we can send sse through them
    const clientId = await redis.incr('sse::client::id').catch((err) => {
      console.error(ERROR_MESSAGES.sseService.clientIdSubscribe);
      console.error(err);
      Sentry.captureException(err);
    });
    // Subscribe to the redis channel for this client

    if (!clientId) {
      Sentry.captureException(ERROR_MESSAGES.sseService.clientIdNotFound);
      throw new Error(ERROR_MESSAGES.sseService.clientIdNotFound);
    }

    await redisSub.subscribe(this.idToChannel(clientId)).catch((err) => {
      console.error(ERROR_MESSAGES.sseService.subscribe);
      console.error(err);
      Sentry.captureException(err);
    });

    // Add to room
    const clientInfo = JSON.stringify({
      clientId,
      metadata: metadata,
    } as RedisClientInfo<T>);
    await redis.sadd(room, clientInfo).catch((err) => {
      console.error(err);
      Sentry.captureException(err);
    });

    // Keep track of response object in direct connections
    this.directConnections[clientId] = {
      res,
      cleanup: async () => {
        // Remove from the redis room
        await redis.srem(room, clientInfo).catch((err) => {
          console.error(ERROR_MESSAGES.sseService.removeFromRoom);
          console.error(err);
        });
        await redisSub.unsubscribe(this.idToChannel(clientId)).catch((err) => {
          console.error(ERROR_MESSAGES.sseService.unsubscribe);
          console.error(err);
          Sentry.captureException(err);
        });
        res.end();
      },
    };

    // Ack so frontend knows we're connected
    res.write('\n');

    // Remove dead connections!
    res.socket.on('end', async () => {
      await this.directConnections[clientId].cleanup().catch((err) => {
        console.error(ERROR_MESSAGES.sseService.directConnections);
        console.error(err);
        Sentry.captureException(err);
      });
      delete this.directConnections[clientId];
    });
  }

  /** Send some data to everyone in a room */
  async sendEvent<D>(
    room: string,
    payload: (metadata: T) => Promise<D>,
  ): Promise<void> {
    const redisPub = this.redisService.getClient('pub');
    const redis = this.redisService.getClient('db');

    if (!redisPub) {
      Sentry.captureException(ERROR_MESSAGES.sseService.getPubClient);
      throw new Error(ERROR_MESSAGES.sseService.getPubClient);
    }

    if (!redis) {
      Sentry.captureException(ERROR_MESSAGES.sseService.getDBClient);
      throw new Error(ERROR_MESSAGES.sseService.getDBClient);
    }

    const roomInfo = await redis.smembers(room).catch((err) => {
      console.error(ERROR_MESSAGES.sseService.roomMembers);
      console.error(err);
      Sentry.captureException(err);
    });
    if (room && roomInfo) {
      const clients: RedisClientInfo<T>[] = roomInfo.map((s) => JSON.parse(s));
      console.log(`sending sse to ${clients.length} clients in ${room}`);
      console.time(`sending sse time: `);
      await each(clients, async ({ clientId, metadata }) => {
        const toSend = serialize(
          await payload(metadata).catch((err) => {
            console.error(ERROR_MESSAGES.sseService.serialize);
            console.error(err);
            Sentry.captureException(err);
          }),
        );
        await redisPub
          .publish(this.idToChannel(clientId), toSend)
          .catch((err) => {
            console.error(ERROR_MESSAGES.sseService.publish);
            console.error(err);
            Sentry.captureException(err);
          });
      });
      console.timeEnd(`sending sse time: `);
    }
  }
}
Example #11
Source File: service-bus.module.ts    From pebula-node with MIT License
@Module({
  providers: [
    SbDiscoveryFactoryService,
  ],
})
export class ServiceBusModule implements OnModuleInit, OnModuleDestroy {

  /**
   * Registers one or more service bus servers that will be used as the underlying resources to generate `Queue` & `Subscription` listeners.
   *
   * You can provide multiple server configurations; however, make sure that each of them has a unique name.
   * Note that leaving the name unset counts as a unique name of its own.
   *
   */
  static register(options: SbModuleRegisterOptions): DynamicModule {
    const providers: Provider[] = [];

    if (Array.isArray(options.servers)) {
      providers.push({
        provide: SB_SERVER_OPTIONS,
        useValue: options.servers,
      });
    } else {
      providers.push({
        provide: SB_SERVER_OPTIONS,
        ...options.servers,
      });
    }

    if (Array.isArray(options.clients)) {
      providers.push({
        provide: SB_CLIENT_OPTIONS,
        useValue: options.clients,
      });
    } else {
      providers.push({
        provide: SB_CLIENT_OPTIONS,
        ...options.clients,
      });
    }

    if (options.metaFactoryProvider) {
      providers.push(normalizeProvider(options.metaFactoryProvider));
    }

    if (Array.isArray(options.providers)) {
      providers.push(...options.providers);
    }

    return { module: ServiceBusModule, providers };
  }

  private discovery: SbDiscoveryService;

  constructor(discoveryFactory: SbDiscoveryFactoryService,
              @Optional() errorHandler?: SbErrorHandler,
              @Optional() @Inject(SB_META_HELPER_FACTORY_TOKEN) metadataHelper?: any,
              @Optional() @Inject(SB_CLIENT_OPTIONS) clientOptions?: SbClientOptions[],
              @Optional() @Inject(SB_SERVER_OPTIONS) serverOptions?: SbServerOptions[]) {
    if (!Array.isArray(serverOptions) || serverOptions.length === 0) {
      throw new Error('You must define at least 1 server, did you use `ServiceBusModule.register()` ?');
    }

    if (errorHandler) {
      sbResourceManager.errorHandler = errorHandler;
    }

    this.discovery = discoveryFactory.create(
      serverOptions,
      !Array.isArray(clientOptions) || clientOptions.length === 0 ? [{}] : clientOptions,
      metadataHelper,
    );

    this.discovery.init();
  }

  async onModuleInit(): Promise<void> {
    await this.discovery.discover();
  }

  async onModuleDestroy(): Promise<void> {
    await this.discovery.destroy();
  }
}
Example #12
Source File: sqs.service.ts    From nestjs-sqs with MIT License
@Injectable()
export class SqsService implements OnModuleInit, OnModuleDestroy {
  public readonly consumers = new Map<QueueName, Consumer>();
  public readonly producers = new Map<QueueName, Producer>();

  private readonly logger = new Logger('SqsService', {
    timestamp: false,
  });

  public constructor(
    @Inject(SQS_OPTIONS) public readonly options: SqsOptions,
    private readonly discover: DiscoveryService,
  ) {}

  public async onModuleInit(): Promise<void> {
    const messageHandlers = await this.discover.providerMethodsWithMetaAtKey<SqsMessageHandlerMeta>(
      SQS_CONSUMER_METHOD,
    );
    const eventHandlers = await this.discover.providerMethodsWithMetaAtKey<SqsConsumerEventHandlerMeta>(
      SQS_CONSUMER_EVENT_HANDLER,
    );

    this.options.consumers?.forEach((options) => {
      const { name, ...consumerOptions } = options;
      if (this.consumers.has(name)) {
        throw new Error(`Consumer already exists: ${name}`);
      }

      const metadata = messageHandlers.find(({ meta }) => meta.name === name);
      if (!metadata) {
        this.logger.warn(`No metadata found for: ${name}`);
        // Without a registered handler there is nothing to consume; skip this
        // consumer rather than dereferencing the missing metadata below.
        return;
      }

      const isBatchHandler = metadata.meta.batch === true;
      const consumer = Consumer.create({
        ...consumerOptions,
        ...(isBatchHandler
          ? {
              handleMessageBatch: metadata.discoveredMethod.handler.bind(
                metadata.discoveredMethod.parentClass.instance,
              ),
            }
          : { handleMessage: metadata.discoveredMethod.handler.bind(metadata.discoveredMethod.parentClass.instance) }),
      });

      const eventsMetadata = eventHandlers.filter(({ meta }) => meta.name === name);
      for (const eventMetadata of eventsMetadata) {
        if (eventMetadata) {
          consumer.addListener(
            eventMetadata.meta.eventName,
            eventMetadata.discoveredMethod.handler.bind(metadata.discoveredMethod.parentClass.instance),
          );
        }
      }
      this.consumers.set(name, consumer);
    });

    this.options.producers?.forEach((options) => {
      const { name, ...producerOptions } = options;
      if (this.producers.has(name)) {
        throw new Error(`Producer already exists: ${name}`);
      }

      const producer = Producer.create(producerOptions);
      this.producers.set(name, producer);
    });

    for (const consumer of this.consumers.values()) {
      consumer.start();
    }
  }

  public onModuleDestroy() {
    for (const consumer of this.consumers.values()) {
      consumer.stop();
    }
  }

  private getQueueInfo(name: QueueName) {
    if (!this.consumers.has(name) && !this.producers.has(name)) {
      throw new Error(`Consumer/Producer does not exist: ${name}`);
    }

    const { sqs, queueUrl } = (this.consumers.get(name) ?? this.producers.get(name)) as {
      sqs: AWS.SQS;
      queueUrl: string;
    };
    if (!sqs) {
      throw new Error('SQS instance does not exist');
    }

    return {
      sqs,
      queueUrl,
    };
  }

  public async purgeQueue(name: QueueName) {
    const { sqs, queueUrl } = this.getQueueInfo(name);
    return sqs
      .purgeQueue({
        QueueUrl: queueUrl,
      })
      .promise();
  }

  public async getQueueAttributes(name: QueueName) {
    const { sqs, queueUrl } = this.getQueueInfo(name);
    const response = await sqs
      .getQueueAttributes({
        QueueUrl: queueUrl,
        AttributeNames: ['All'],
      })
      .promise();
    return response.Attributes as { [key in QueueAttributeName]: string };
  }

  public getProducerQueueSize(name: QueueName) {
    if (!this.producers.has(name)) {
      throw new Error(`Producer does not exist: ${name}`);
    }

    return this.producers.get(name).queueSize();
  }

  public send<T = any>(name: QueueName, payload: Message<T> | Message<T>[]) {
    if (!this.producers.has(name)) {
      throw new Error(`Producer does not exist: ${name}`);
    }

    const originalMessages = Array.isArray(payload) ? payload : [payload];
    const messages = originalMessages.map((message) => {
      let body = message.body;
      if (typeof body !== 'string') {
        body = JSON.stringify(body) as any;
      }

      return {
        ...message,
        body,
      };
    });

    const producer = this.producers.get(name);
    return producer.send(messages as any[]);
  }
}
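A hypothetical producer call against this service, assuming a producer named 'my-queue' was registered through the module options; the id/body message shape mirrors what send() above serializes:

// Hypothetical usage; assumes a producer named 'my-queue' was registered.
await sqsService.send('my-queue', {
  id: 'message-1',
  body: { orderId: 42, status: 'created' }, // non-string bodies are JSON.stringified
});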
Example #13
Source File: queue.module.ts    From nest-amqp with MIT License
@Module({})
export class QueueModule implements OnModuleInit, OnModuleDestroy {
  private static readonly moduleDefinition: DynamicModule = {
    global: false,
    module: QueueModule,
    providers: [AMQPService, QueueService, MetadataScanner, ListenerExplorer, ObjectValidatorService],
    exports: [QueueService],
  };

  public static forRoot(options: QueueModuleOptions): DynamicModule;
  public static forRoot(connectionUri: string): DynamicModule;
  public static forRoot(connectionUri: string, options: Omit<QueueModuleOptions, 'connectionUri'>): DynamicModule;
  public static forRoot(connections: NamedAMQPConnectionOptions[], options?: MultiConnectionQueueModuleOptions): DynamicModule;
  public static forRoot(
    connectionUri: string | QueueModuleOptions | NamedAMQPConnectionOptions[],
    options: Omit<QueueModuleOptions, 'connectionUri'> | MultiConnectionQueueModuleOptions = {},
  ): DynamicModule {
    const queueModuleOptionsProviders = [];
    const connectionProviders = [];
    const connectionOptionsProviders = [];

    if (toString.call(connectionUri) === '[object Array]') {
      queueModuleOptionsProviders.push(QueueModule.getQueueModuleOptionsProvider(options));
      for (const connectionOptions of connectionUri as NamedAMQPConnectionOptions[]) {
        connectionOptionsProviders.push(QueueModule.getAMQPConnectionOptionsProvider(connectionOptions, connectionOptions.name));
        connectionProviders.push(QueueModule.getConnectionProvider(connectionOptions.name));
      }
    } else {
      const moduleOptions = typeof connectionUri === 'string' ? { ...options, connectionUri } : (connectionUri as QueueModuleOptions);
      queueModuleOptionsProviders.push(QueueModule.getQueueModuleOptionsProvider(moduleOptions));
      connectionOptionsProviders.push(QueueModule.getAMQPConnectionOptionsProvider(moduleOptions));
      connectionProviders.push(QueueModule.getConnectionProvider(AMQP_DEFAULT_CONNECTION_TOKEN));
    }

    Object.assign(QueueModule.moduleDefinition, {
      global: !!options.isGlobal,
      providers: [
        ...queueModuleOptionsProviders,
        ...QueueModule.moduleDefinition.providers,
        ...connectionOptionsProviders,
        ...connectionProviders,
      ],
    });

    return QueueModule.moduleDefinition;
  }

  public static forRootAsync(options: QueueModuleAsyncOptions): DynamicModule {
    // TODO - allow for multiple connections
    const connectionProviders = [QueueModule.getConnectionProvider(AMQP_DEFAULT_CONNECTION_TOKEN)];

    const asyncProviders = this.createAsyncProviders(options);

    Object.assign(QueueModule.moduleDefinition, {
      global: !!options.isGlobal,
      imports: options.imports,
      providers: [...asyncProviders, ...QueueModule.moduleDefinition.providers, ...connectionProviders],
    });

    return QueueModule.moduleDefinition;
  }

  public static forFeature(): DynamicModule {
    return QueueModule.moduleDefinition;
  }

  private static createAsyncProviders(options: QueueModuleAsyncOptions): Provider[] {
    if (!options.useClass && !options.useExisting && !options.useFactory) {
      throw new Error('Must provide factory, class or existing provider');
    }

    if (options.useExisting || options.useFactory) {
      return [this.createAsyncQueueModuleOptionsProvider(options), this.createAsyncAMQConnectionsOptionsProvider(options)];
    }

    const useClass = options.useClass as Type<QueueModuleOptionsFactory>;

    return [
      this.createAsyncQueueModuleOptionsProvider(options),
      this.createAsyncAMQConnectionsOptionsProvider(options),
      {
        provide: useClass,
        useClass,
      },
    ];
  }

  private static createAsyncQueueModuleOptionsProvider(options: QueueModuleAsyncOptions): Provider {
    if (options.useFactory) {
      return {
        provide: QUEUE_MODULE_OPTIONS,
        useFactory: options.useFactory,
        inject: options.inject || [],
      };
    }

    const inject = [options.useClass ?? options.useExisting];

    return {
      provide: QUEUE_MODULE_OPTIONS,
      useFactory: async (factory: QueueModuleOptionsFactory): Promise<QueueModuleOptions> => factory.createQueueModuleOptions(),
      inject,
    };
  }

  private static createAsyncAMQConnectionsOptionsProvider(options: QueueModuleAsyncOptions): Provider {
    if (options.useFactory) {
      return {
        provide: getAMQConnectionOptionsToken(AMQP_DEFAULT_CONNECTION_TOKEN),
        inject: options.inject || [],
        useFactory: async (...args: any[]) => {
          const moduleOptions = await options.useFactory(...args);
          const useValue = QueueModule.getConnectionOptions(moduleOptions);

          AMQConnectionOptionsStorage.add(AMQP_DEFAULT_CONNECTION_TOKEN, useValue);

          return moduleOptions;
        },
      };
    }

    const inject = [options.useClass ?? options.useExisting];

    return {
      provide: getAMQConnectionOptionsToken(AMQP_DEFAULT_CONNECTION_TOKEN),
      useFactory: async (optionsFactory: QueueModuleOptionsFactory) => {
        const moduleOptions = await optionsFactory.createQueueModuleOptions();
        const useValue = QueueModule.getConnectionOptions(moduleOptions);

        AMQConnectionOptionsStorage.add(AMQP_DEFAULT_CONNECTION_TOKEN, useValue);

        return moduleOptions;
      },
      inject,
    };
  }

  /**
   * Creates a connection provider with the given name
   *
   * @param {string} connection Name of the connection
   *
   * @returns {Provider} Named Connection provider
   *
   * @private
   * @static
   */
  private static getConnectionProvider(connection: string = AMQP_DEFAULT_CONNECTION_TOKEN): Provider {
    return {
      provide: getAMQConnectionToken(connection),
      useFactory: async (options: AMQPConnectionOptions): Promise<Connection> => AMQPService.createConnection(options, connection),
      inject: [getAMQConnectionOptionsToken(connection)],
    };
  }

  private static getQueueModuleOptionsProvider(options: Partial<QueueModuleOptions>): Provider {
    return {
      provide: QUEUE_MODULE_OPTIONS,
      useValue: options,
    };
  }

  private static getAMQPConnectionOptionsProvider(
    options: AMQPConnectionOptions,
    connection: string = AMQP_DEFAULT_CONNECTION_TOKEN,
  ): Provider {
    const provide = getAMQConnectionOptionsToken(connection);
    const useValue = QueueModule.getConnectionOptions(options);

    AMQConnectionOptionsStorage.add(connection, useValue);

    return { provide, useValue };
  }

  private static getConnectionOptions(options: AMQPConnectionOptions): AMQPConnectionOptions {
    const { connectionOptions, connectionUri, throwExceptionOnConnectionError } = options;

    return {
      connectionUri,
      ...(isDefined(connectionOptions) ? { connectionOptions } : {}),
      ...(isDefined(throwExceptionOnConnectionError) ? { throwExceptionOnConnectionError } : {}),
    };
  }

  constructor(
    @Inject(QUEUE_MODULE_OPTIONS) private readonly moduleOptions: QueueModuleOptions,
    private readonly queueService: QueueService,
    private readonly listenerExplorer: ListenerExplorer,
    private readonly moduleRef: ModuleRef,
  ) {}

  // istanbul ignore next
  public async onModuleInit(): Promise<void> {
    logger.log('initializing queue module');

    if (this.moduleOptions.logger) {
      Logger.overrideLogger(this.moduleOptions.logger);
    }

    // find everything marked with @Listen
    const listeners = this.listenerExplorer.explore();
    await this.attachListeners(listeners);

    AMQPService.eventEmitter.on(AMQP_CONNECTION_RECONNECT, () => {
      logger.log('reattaching receivers to connection');
      this.queueService.clearSenderAndReceiverLinks();
      this.attachListeners(listeners)
        .then(() => logger.log('receivers reattached'))
        .catch(error => logger.error('error while reattaching listeners', error));
    });

    logger.log('queue module initialized');
  }

  public async onModuleDestroy(): Promise<void> {
    logger.log('destroying queue module');

    await this.queueService.shutdown();

    logger.log('queue module destroyed');
  }

  // istanbul ignore next
  private async attachListeners(listeners: Array<ListenerMetadata<unknown>>): Promise<void> {
    // set up listeners
    for (const listener of listeners) {
      logger.debug(`attaching listener for @Listen: ${JSON.stringify(listener)}`);

      // fetch instance from DI framework
      let target: any;
      try {
        target = this.moduleRef.get(listener.target as any, { strict: false });
      } catch (err) {
        if (err instanceof UnknownElementException) {
          target = this.moduleRef.get(listener.targetName, { strict: false });
        } else {
          throw err;
        }
      }

      await this.queueService.listen(listener.source, listener.callback.bind(target), listener.options, listener.connection);
    }
  }
}
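Based on the forRoot() overloads at the top of the example, a hypothetical registration with a single connection URI might look like this:

// Hypothetical registration based on the forRoot() overloads shown above.
@Module({
  imports: [QueueModule.forRoot('amqp://user:password@localhost:5672')],
})
export class AppModule {}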
Example #14
Source File: puppeteer-core.module.ts    From nest-puppeteer with MIT License
@Global()
@Module({})
export class PuppeteerCoreModule
  implements OnApplicationShutdown, OnModuleDestroy {
  constructor(
    @Inject(PUPPETEER_INSTANCE_NAME) private readonly instanceName: string,
    private readonly moduleRef: ModuleRef,
  ) {}
  onApplicationShutdown() {
    return this.onModuleDestroy();
  }

  static forRoot(
    launchOptions: LaunchOptions = DEFAULT_CHROME_LAUNCH_OPTIONS,
    instanceName: string = DEFAULT_PUPPETEER_INSTANCE_NAME,
  ): DynamicModule {
    const instanceNameProvider = {
      provide: PUPPETEER_INSTANCE_NAME,
      useValue: instanceName,
    };

    const browserProvider = {
      provide: getBrowserToken(instanceName),
      async useFactory() {
        return await launch(launchOptions);
      },
    };

    const contextProvider = {
      provide: getContextToken(instanceName),
      async useFactory(browser: Browser) {
        return browser.createIncognitoBrowserContext();
      },
      inject: [getBrowserToken(instanceName)],
    };

    const pageProvider = {
      provide: getPageToken(instanceName),
      async useFactory(context: BrowserContext) {
        return await context.newPage();
      },
      inject: [getContextToken(instanceName)],
    };

    return {
      module: PuppeteerCoreModule,
      providers: [
        instanceNameProvider,
        browserProvider,
        contextProvider,
        pageProvider,
      ],
      exports: [browserProvider, contextProvider, pageProvider],
    };
  }

  static forRootAsync(options: PuppeteerModuleAsyncOptions): DynamicModule {
    const puppeteerInstanceName =
      options.instanceName ?? DEFAULT_PUPPETEER_INSTANCE_NAME;

    const instanceNameProvider = {
      provide: PUPPETEER_INSTANCE_NAME,
      useValue: puppeteerInstanceName,
    };

    const browserProvider = {
      provide: getBrowserToken(puppeteerInstanceName),
      async useFactory(puppeteerModuleOptions: PuppeteerModuleOptions) {
        return await launch(
          puppeteerModuleOptions.launchOptions ?? DEFAULT_CHROME_LAUNCH_OPTIONS,
        );
      },
      inject: [PUPPETEER_MODULE_OPTIONS],
    };

    const contextProvider = {
      provide: getContextToken(puppeteerInstanceName),
      async useFactory(browser: Browser) {
        return await browser.createIncognitoBrowserContext();
      },
      inject: [
        PUPPETEER_MODULE_OPTIONS,
        getBrowserToken(puppeteerInstanceName),
      ],
    };

    const pageProvider = {
      provide: getPageToken(puppeteerInstanceName),
      async useFactory(context: BrowserContext) {
        return await context.newPage();
      },
      inject: [
        PUPPETEER_MODULE_OPTIONS,
        getContextToken(puppeteerInstanceName),
      ],
    };

    const asyncProviders = this.createAsyncProviders(options);

    return {
      module: PuppeteerCoreModule,
      imports: options.imports,
      providers: [
        ...asyncProviders,
        browserProvider,
        contextProvider,
        pageProvider,
        instanceNameProvider,
      ],
      exports: [browserProvider, contextProvider, pageProvider],
    };
  }

  async onModuleDestroy() {
    const browser: Browser = this.moduleRef.get(
      getBrowserToken(this.instanceName),
    );

    if (browser?.isConnected()) await browser.close();
  }

  private static createAsyncProviders(
    options: PuppeteerModuleAsyncOptions,
  ): Provider[] {
    if (options.useExisting || options.useFactory) {
      return [this.createAsyncOptionsProvider(options)];
    } else if (options.useClass) {
      return [
        this.createAsyncOptionsProvider(options),
        {
          provide: options.useClass,
          useClass: options.useClass,
        },
      ];
    } else {
      return [];
    }
  }

  private static createAsyncOptionsProvider(
    options: PuppeteerModuleAsyncOptions,
  ): Provider {
    if (options.useFactory) {
      return {
        provide: PUPPETEER_MODULE_OPTIONS,
        useFactory: options.useFactory,
        inject: options.inject ?? [],
      };
    } else if (options.useExisting) {
      return {
        provide: PUPPETEER_MODULE_OPTIONS,
        async useFactory(optionsFactory: PuppeteerOptionsFactory) {
          return optionsFactory.createPuppeteerOptions();
        },
        inject: [options.useExisting],
      };
    } else if (options.useClass) {
      return {
        provide: PUPPETEER_MODULE_OPTIONS,
        async useFactory(optionsFactory: PuppeteerOptionsFactory) {
          return optionsFactory.createPuppeteerOptions();
        },
        inject: [options.useClass],
      };
    } else {
      throw new Error('Invalid PuppeteerModule options');
    }
  }
}