fs#WriteStream TypeScript Examples

The following examples show how to use fs#WriteStream in TypeScript. Each example is drawn from an open-source project; the source file, project, and license are listed above the code.
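For orientation, here is a minimal, hedged sketch of the typical fs.WriteStream lifecycle (create the stream, attach an error handler, write, then close); the file path is illustrative only.

import fs, { WriteStream } from 'fs';

// Create a write stream for an illustrative output file.
const out: WriteStream = fs.createWriteStream('/tmp/example-output.log');

// Always attach an error handler; a failed open or write otherwise crashes the process.
out.on('error', (err) => console.error('write stream error:', err));

// Queue some data and close the stream; the callback runs once everything is flushed.
out.write('first line\n');
out.end('last line\n', () => console.log('stream closed'));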
Example #1
Source File: utils.ts    From polkadot-watcher-csv-exporter with Apache License 2.0    6 votes
initWriteFileStream = (dirPath: string, fileName: string, logger: Logger): WriteStream => {

  const filePath = `${dirPath}/${fileName}`;
  const file = fs.createWriteStream(filePath);
  file.on('error', function (err) { logger.error(err.stack) });

  return file;
}
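A hedged usage sketch of the helper above: the caller writes rows to the returned stream and is responsible for closing it with end(). The directory, file name, and logger below are hypothetical.

const csvFile = initWriteFileStream('/tmp/exports', 'validators.csv', logger); // logger stands in for the project's Logger
csvFile.write('era,session,block_number\n');
csvFile.end(); // flushes buffered data and closes the underlying file descriptor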
Example #2
Source File: GraphQLStateGenerator.ts    From graphql-ts-client with MIT License    6 votes
protected createFetcheWriter(
        modelType: GraphQLObjectType | GraphQLInterfaceType | GraphQLUnionType,
        ctx: FetcherContext,
        stream: WriteStream,
        config: GeneratorConfig
    ): GraphQLStateFetcherWriter {
        return new GraphQLStateFetcherWriter(
            modelType,
            ctx,
            stream,
            config
        );
    }
Example #3
Source File: node.ts    From hoprnet with GNU General Public License v3.0    6 votes
function createEchoReplier(remoteIdentityname: string, pipeFileStream?: WriteStream) {
  return (source: Stream['source']) => {
    return (async function* () {
      for await (const encodedMsg of source) {
        const decodedMsg = decodeMsg(encodedMsg.slice())
        const replyMsg = `echo: ${decodedMsg}`

        console.log(`received message '${decodedMsg}' from ${remoteIdentityname}`)
        console.log(`replied with ${replyMsg}`)

        if (pipeFileStream) {
          pipeFileStream.write(`<${remoteIdentityname}: ${decodedMsg}\n`)
          pipeFileStream.write(`>${remoteIdentityname}: ${replyMsg}\n`)
        }
        yield encodeMsg(replyMsg)
      }
    })()
  }
}
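The pipeFileStream parameter is optional; when message logging is wanted, a plain append-mode file stream is enough. A minimal sketch under that assumption (the path is illustrative, and the pipe call is shown only as a comment because source and sink come from a live connection):

import fs from 'fs';

// Open the log file in append mode so repeated runs accumulate messages.
const pipeFileStream = fs.createWriteStream('/tmp/echo-messages.log', { flags: 'a' });

// Illustrative wiring into a libp2p-style pipeline:
// pipe(conn.stream.source, createEchoReplier('alice', pipeFileStream), conn.stream.sink)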
Example #4
Source File: Generator.ts    From graphql-ts-client with MIT License    6 votes
protected createFetcheWriter(
        modelType: GraphQLObjectType | GraphQLInterfaceType | GraphQLUnionType,
        ctx: FetcherContext,
        stream: WriteStream,
        config: GeneratorConfig
    ): FetcherWriter {
        return new FetcherWriter(
            modelType,
            ctx,
            stream,
            config
        );
    }
Example #5
Source File: csvWriter.ts    From polkadot-watcher-csv-exporter with Apache License 2.0    6 votes
_writeFileValidatorSession = (file: WriteStream, request: WriteValidatorCSVRequest): void => {
  const { eraIndex, sessionIndex, blockNumber, myValidatorStaking, myWaitingValidatorStaking, totalIssuance, validatorRewardsPreviousEra } = request
  file.write(`era,session,block_number,active,name,stash_address,controller_address,commission_percent,self_stake,total_stake,num_stakers,stakers,num_voters,voters,era_points,total_issuance,validator_rewards_previous_era\n`);
  for (const staking of myValidatorStaking) {
    file.write(`${eraIndex},${sessionIndex ? sessionIndex : -1},${blockNumber ? blockNumber : -1},${1},${staking.displayName},${staking.accountId},${staking.controllerId},${(parseInt(staking.validatorPrefs.commission.toString()) / 10000000).toFixed(2)},${staking.exposure.own},${staking.exposure.total},${staking.exposure.others.length},"${staking.exposure.others.map(staker=>staker.who+';'+staker.value).join(`,`)}",${staking.voters.length},"${staking.voters.map(staker=>staker.address+';'+staker.value).join(`,`)}",${staking.eraPoints},${totalIssuance},${validatorRewardsPreviousEra}\n`);
  }
  if(myWaitingValidatorStaking){
    // total vs active: polkadot-js displays the total as both the total and the own stake for validators in the waiting set
    for (const staking of myWaitingValidatorStaking) {
      file.write(`${eraIndex},${sessionIndex ? sessionIndex : -1},${blockNumber ? blockNumber : -1},${0},${staking.displayName},${staking.accountId},${staking.controllerId},${(parseInt(staking.validatorPrefs.commission.toString()) / 10000000).toFixed(2)},${staking.stakingLedger.total},${staking.stakingLedger.total},${staking.exposure.others.length},"${staking.exposure.others.map(staker=>staker.who+';'+staker.value).join(`,`)}",${staking.voters.length},"${staking.voters.map(staker=>staker.address+';'+staker.value).join(`,`)}",${staking.eraPoints},${totalIssuance},${validatorRewardsPreviousEra}\n`);
    }
  }
}
Example #6
Source File: AsyncGenerator.ts    From graphql-ts-client with MIT License    5 votes
protected writeIndexCode(stream: WriteStream, schema: GraphQLSchema) {
        stream.write(`export type { GraphQLExecutor } from "./Async";\n`);
        stream.write(`export { setGraphQLExecutor, execute } from "./Async";\n`);
        super.writeIndexCode(stream, schema);
    }
Example #7
Source File: replay.tracker.ts    From diablo2 with MIT License    5 votes
_stream: WriteStream | null;
Example #8
Source File: project.spec.ts    From cli with Apache License 2.0    5 votes
doMockCreateWriteStream = () => {
  mockedCreateWriteStream.mockImplementation(() => {
    const writableStream = new Writable();
    writableStream._write = (_chunk, _encoding, next) => next();
    process.nextTick(() => writableStream.emit('close'));
    return writableStream as unknown as WriteStream;
  });
}
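A hedged sketch of how a mock like this is typically used in a Jest test; jest.mock('fs'), jest.mocked, and the function under test are assumptions, not part of the original spec file.

import { createWriteStream } from 'fs';

jest.mock('fs');
const mockedCreateWriteStream = jest.mocked(createWriteStream);

it('writes output without touching the real filesystem', async () => {
  doMockCreateWriteStream();
  await runCodeThatWritesFiles(); // hypothetical function that calls fs.createWriteStream internally
  expect(mockedCreateWriteStream).toHaveBeenCalled();
});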
Example #9
Source File: node.ts    From hoprnet with GNU General Public License v3.0    5 votes
async function startNode(
  {
    peerId,
    port,
    pipeFileStream
  }: {
    peerId: PeerId
    port: number
    pipeFileStream?: WriteStream
  },
  options: HoprConnectConfig
) {
  console.log(
    `starting node, bootstrap address ${
      options.config?.initialNodes != undefined && options.config?.initialNodes.length > 0
        ? options.config.initialNodes[0].id.toB58String()
        : 'undefined'
    }`
  )

  const node = await libp2p.create({
    peerId,
    addresses: {
      listen: [`/ip4/0.0.0.0/tcp/${port}/p2p/${peerId.toB58String()}`]
    },
    modules: {
      transport: [HoprConnect as any],
      streamMuxer: [Mplex],
      connEncryption: [NOISE]
    },
    config: {
      transport: {
        HoprConnect: options
      },
      peerDiscovery: {
        autoDial: false
      },
      relay: {
        // Conflicts with HoprConnect's own mechanism
        enabled: false
      },
      nat: {
        // Conflicts with HoprConnect's own mechanism
        enabled: false
      }
    }
  })

  async function identityNameForConnection(connection?: Connection): Promise<string> {
    if (!connection) {
      return 'unknown'
    }
    return identityFromPeerId(connection.remotePeer)
  }

  node.handle(TEST_PROTOCOL, async (conn: HandlerProps) => {
    pipe(
      conn.stream.source,
      createEchoReplier(await identityNameForConnection(conn.connection), pipeFileStream),
      conn.stream.sink
    )
  })

  await node.start()
  console.log(`node started`)
  return node
}
Example #10
Source File: index.ts    From electron-request with MIT License    5 votes
/**
   * Download file to destination
   * @param {Writable} fileOut  Download write stream
   * @param {ProgressCallback=} onProgress Download progress callback
   * @param {ValidateOptions=} validateOptions Optional download validation options
   */
  public download = async (
    fileOut: Writable,
    onProgress?: ProgressCallback,
    validateOptions?: ValidateOptions,
  ): Promise<void> => {
    const feedStreams: Writable[] = [];

    if (typeof onProgress === 'function') {
      const contentLength = Number(this.config.headers.get(HEADER_MAP.CONTENT_LENGTH));
      feedStreams.push(new ProgressCallbackTransform(contentLength, onProgress));
    }

    if (validateOptions) {
      feedStreams.push(new DigestTransform(validateOptions));
    }

    feedStreams.push(fileOut);

    return new Promise((resolve, reject) => {
      let lastStream = this.stream;
      for (const stream of feedStreams) {
        stream.on('error', (error: Error) => {
          reject(error);
        });
        lastStream = lastStream.pipe(stream);
      }

      fileOut.once('finish', () => {
        if (fileOut instanceof WriteStream && typeof fileOut.close === 'function') {
          fileOut.close();
        }
        resolve();
      });
    });
  };
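A hedged usage sketch: a stream from fs.createWriteStream satisfies the Writable parameter, and because it is an fs.WriteStream the finish handler above will also close it. The response object and file path are illustrative.

import fs from 'fs';

// 'response' stands in for the object returned by electron-request.
const fileOut = fs.createWriteStream('/tmp/installer.zip');
await response.download(fileOut, (progress) => {
  console.log('download progress:', progress); // the exact shape of `progress` is defined by ProgressCallback
});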
Example #11
Source File: csvWriter.ts    From polkadot-watcher-csv-exporter with Apache License 2.0    5 votes
_writeFileNominatorSession = (file: WriteStream, request: WriteNominatorCSVRequest): void => {
  const { eraIndex, sessionIndex, blockNumber, nominatorStaking } = request
  file.write(`era,session,block_number,stash_address,controller_address,bonded_amount,num_targets,targets\n`);
  for (const staking of nominatorStaking) {
    const numTargets = staking.nominators ? staking.nominators.length : 0;
    file.write(`${eraIndex},${sessionIndex},${blockNumber},${staking.accountId},${staking.controllerId},${staking.stakingLedger.total},${numTargets},"${staking.nominators.join(`,`)}"\n`);
  }
}
Example #12
Source File: ApolloGenerator.d.ts    From graphql-ts-client with MIT License    5 votes
protected writeIndexCode(stream: WriteStream, schema: GraphQLSchema): void;
Example #13
Source File: index.ts    From Dimensions with MIT License    5 votes
errorLogWriteStream: WriteStream = null;
Example #14
Source File: CommonTypesWriter.d.ts    From graphql-ts-client with MIT License    5 votes
constructor(schema: GraphQLSchema, inheritanceInfo: InheritanceInfo, stream: WriteStream, config: GeneratorConfig);
Example #15
Source File: FsStrategy.ts    From Designer-Server with GNU General Public License v3.0    5 votes
createWriteStream = (path: string): { stream: WriteStream, path: string } => {
        return {
            stream: createWriteStream(propertyConfigs.uploadDist.localPath + "/" + path),
            path: propertyConfigs.uploadDist.localPath + "/" + path
        }
    }
Example #16
Source File: AsyncGenerator.d.ts    From graphql-ts-client with MIT License    5 votes
protected writeIndexCode(stream: WriteStream, schema: GraphQLSchema): void;
Example #17
Source File: service.ts    From cardano-launcher with Apache License 2.0    4 votes
/**
 * Initialise a [[Service]] which can control the lifetime of a
 * backend process.
 *
 * This does not start the process. Use [[Service.start]] for that.
 *
 * @param cfgPromise - a promise which will return the command to run.
 * @param logger - logging object.
 * @param childProcessLogWriteStream - WriteStream for writing the child process data events from stdout and stderr.
 * @return A handle on the [[Service]].
 */
export function setupService(
  cfgPromise: Promise<StartService>,
  logger: Logger = console,
  childProcessLogWriteStream?: WriteStream
): Service {
  const events = new ServiceEvents();
  // What the current state is.
  let status = ServiceStatus.NotStarted;
  // Fulfilled promise of service command-line.
  // This will always be defined if status > Starting.
  let cfg: StartService;
  // NodeJS child process object, or null if not running.
  let proc: ChildProcess | null = null;
  // Pipe file descriptor for clean shutdown, or null if not yet running.
  let shutdownFD: number | null;
  // When the service started (milliseconds since epoch)
  let startTimeMs = 0;
  // How the child process exited, or null if it hasn't yet exited.
  let exitStatus: ServiceExitStatus | null;
  // For cancelling the kill timeout.
  let killTimer: NodeJS.Timeout | null = null;
  let startPromise: Promise<Pid>;

  const setStatus = (newStatus: ServiceStatus): void => {
    logger.debug(
      `setStatus ${ServiceStatus[status]} -> ${ServiceStatus[newStatus]}`
    );
    status = newStatus;
    if (status === ServiceStatus.Started) {
      startTimeMs = Date.now();
    }
    events.statusChanged(status);
  };

  const onStopped = (
    code: number | null = null,
    signal: string | null = null,
    err: Error | null = null
  ): void => {
    exitStatus = { exe: cfg.command, code, signal, err };
    logger.debug(`Service onStopped`, exitStatus);
    if (killTimer) {
      clearTimeout(killTimer);
      killTimer = null;
    }
    proc = null;
    setStatus(ServiceStatus.Stopped);
  };

  const doStart = async (): Promise<Pid> => {
    const envStr = _.map(
      cfg.extraEnv,
      (value, name) => `${name}=${value} `
    ).join('');
    const commandStr = `${envStr}${cfg.command} ${cfg.args.join(' ')}`;
    logger.info(`Service.start: trying to start ${commandStr}`, cfg);
    const stdOuts = childProcessLogWriteStream ? 'pipe' : 'inherit';
    const stdio = [
      cfg.shutdownMethod === ShutdownMethod.CloseStdin ? 'pipe' : 'ignore',
      stdOuts,
      stdOuts,
    ].concat(
      cfg.shutdownMethod === ShutdownMethod.CloseFD ? ['pipe'] : []
    ) as StdioOptions;
    const cwd = cfg.cwd ? { cwd: cfg.cwd } : {};
    const env = cfg.extraEnv
      ? Object.assign({}, process.env, cfg.extraEnv)
      : process.env;
    const options = Object.assign({ stdio }, cwd, { env });
    try {
      proc = spawn(cfg.command, cfg.args, options);
    } catch (err) {
      logger.error(`Service.start: child_process.spawn() failed: ${err}`);
      logger.error(
        `Service.start: child_process.spawn(${cfg.command}, ${cfg.args.join(
          ' '
        )}, ...)`,
        options
      );
      throw err;
    }
    if (cfg.shutdownMethod === ShutdownMethod.CloseStdin) {
      // corresponds to first element of `stdio` above
      shutdownFD = 0;
    } else if (cfg.shutdownMethod === ShutdownMethod.CloseFD) {
      // corresponds to last element of `stdio` above
      shutdownFD = cleanShutdownFD;
    }
    setStatus(ServiceStatus.Started);
    proc.on('exit', (code, signal) => {
      onStopped(code, signal);
    });
    proc.on('error', err => {
      logger.error(`Service.start: child_process failed: ${err}`);
      onStopped(null, null, err);
    });
    if (proc.stdout && proc.stderr && childProcessLogWriteStream) {
      proc.stdout.on('data', data => {
        childProcessLogWriteStream.write(data);
      });
      proc.stderr.on('data', data => {
        childProcessLogWriteStream.write(data);
      });
    }
    return proc.pid as number;
  };

  const doStop = (timeoutSeconds: number): void => {
    logger.info(`Service.stop: trying to stop ${cfg.command}`, cfg);
    setStatus(ServiceStatus.Stopping);
    if (proc) {
      if (cfg.shutdownMethod === ShutdownMethod.Signal) {
        proc.kill('SIGTERM');
      } else if (shutdownFD !== null && proc.stdio[shutdownFD]) {
        const stream = proc.stdio[shutdownFD] as Writable;
        const closeFD = (): void => {
          stream.end();
        };

        // Allow the service one second after startup to begin reading from its
        // shutdownFD, before closing the shutdown FD.
        const shutdownFDGracePeriodMs = 1000;
        const grace = startTimeMs - Date.now() + shutdownFDGracePeriodMs;
        if (grace > 0) {
          setTimeout(closeFD, grace);
        } else {
          closeFD();
        }
      }
    }
    killTimer = setTimeout(() => {
      if (proc) {
        logger.info(
          `Service.stop: timed out after ${timeoutSeconds} seconds. Killing process ${proc.pid}.`
        );
        proc.kill('SIGKILL');
      }
    }, timeoutSeconds * 1000);
  };

  const waitForStop = (): Promise<ServiceExitStatus> =>
    new Promise(resolve => {
      logger.debug(`Service.stop: waiting for ServiceStatus.Stopped`);
      events.on('statusChanged', status => {
        if (status === ServiceStatus.Stopped && exitStatus) {
          resolve(exitStatus);
        }
      });
    });

  const waitForExit = (): Promise<ServiceExitStatus> => {
    const defaultExitStatus = {
      exe: cfg ? cfg.command : '',
      code: null,
      signal: null,
      err: null,
    };
    switch (status) {
      case ServiceStatus.NotStarted:
      case ServiceStatus.Starting:
        return new Promise(resolve => {
          status = ServiceStatus.Stopped;
          exitStatus = defaultExitStatus;
          resolve(exitStatus);
        });
      case ServiceStatus.Started:
        return waitForStop();
      case ServiceStatus.Stopping:
        return waitForStop();
      case ServiceStatus.Stopped:
        return new Promise(resolve => resolve(exitStatus || defaultExitStatus));
    }
  };

  return {
    start: async (): Promise<Pid> => {
      switch (status) {
        case ServiceStatus.NotStarted:
          setStatus(ServiceStatus.Starting);
          startPromise = cfgPromise.then(theCfg => {
            cfg = theCfg;
            return doStart();
          });
          return startPromise;
        case ServiceStatus.Starting:
          logger.info(`Service.start: already starting`);
          return startPromise;
        case ServiceStatus.Started:
          logger.info(`Service.start: already started`);
          return proc?.pid || -1;
        case ServiceStatus.Stopping:
          logger.info(`Service.start: cannot start - already stopping`);
          return -1;
        case ServiceStatus.Stopped:
          logger.info(`Service.start: cannot start - already stopped`);
          return -1;
      }
    },
    stop: async (
      timeoutSeconds = defaultTimeoutSeconds
    ): Promise<ServiceExitStatus> => {
      switch (status) {
        case ServiceStatus.NotStarted:
        case ServiceStatus.Starting:
          logger.info(`Service.stop: cannot stop - never started`);
          break;
        case ServiceStatus.Started:
          doStop(timeoutSeconds);
          break;
        case ServiceStatus.Stopping:
          if (timeoutSeconds === 0 && proc) {
            logger.info(
              `Service.stop: was already stopping, but will now kill process ${proc.pid} immediately`
            );
            proc.kill('SIGKILL');
          } else {
            logger.info(`Service.stop: already stopping`);
          }
          break;
        case ServiceStatus.Stopped:
          logger.info(`Service.stop: already stopped`);
          break;
      }
      return waitForExit();
    },
    waitForExit,
    getStatus: (): ServiceStatus => status,
    getProcess: (): ChildProcess | null => proc,
    getConfig: (): StartService | null => cfg,
    events,
  };
}
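A hedged usage sketch: passing an fs.WriteStream as childProcessLogWriteStream switches the child's stdout/stderr from 'inherit' to 'pipe' and mirrors both into the file. The command and log path below are illustrative, and the config object is abbreviated with a cast rather than filled in completely.

import fs from 'fs';

const logStream = fs.createWriteStream('/tmp/backend.log');
const service = setupService(
  Promise.resolve({ command: 'my-backend', args: ['--port', '8080'] } as StartService), // abbreviated, illustrative config
  console,
  logStream
);
await service.start();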
Example #18
Source File: FetcherWriter.ts    From graphql-ts-client with MIT License    4 votes
constructor(
        protected modelType: GraphQLObjectType | GraphQLInterfaceType | GraphQLUnionType,
        protected ctx: FetcherContext,
        stream: WriteStream,
        config: GeneratorConfig
    ) {
        super(stream, config);

        this.fetcherTypeName = `${this.modelType.name}${config.fetcherSuffix ?? "Fetcher"}`;

        if (modelType instanceof GraphQLUnionType) {
            const map: { [key: string]: GraphQLField<any, any> } = {};
            const itemCount = modelType.getTypes().length;
            if (itemCount !== 0) {
                const fieldCountMap = new Map<string, number>();
                for (const type of modelType.getTypes()) {
                    for (const fieldName in type.getFields()) {
                        fieldCountMap.set(fieldName, (fieldCountMap.get(fieldName) ?? 0) + 1);
                    }
                }
                const firstTypeFieldMap = modelType.getTypes()[0].getFields();
                for (const fieldName in firstTypeFieldMap) {
                    if (fieldCountMap.get(fieldName) === itemCount) {
                        map[fieldName] = firstTypeFieldMap[fieldName]!;
                    }
                }
            }
            this.fieldMap = map;
        } else {
            this.fieldMap = modelType.getFields();
        }
      
        const fieldArgsMap = new Map<string, GraphQLArgument[]>();
        const fieldCategoryMap = new Map<string, string>();
        const defaultFetcherProps: string[] = [];
        this.hasArgs = false;
        for (const fieldName in this.fieldMap) {
            const field = this.fieldMap[fieldName]!;
            const targetType = targetTypeOf(field.type);
            if (this.modelType.name !== "Query" && 
            this.modelType.name !== "Mutation" && 
            targetType === undefined && 
            field.args.length === 0 &&
            !field.isDeprecated) {
                if (config.defaultFetcherExcludeMap !== undefined) {
                    const excludeProps = config.defaultFetcherExcludeMap[modelType.name];
                    if (excludeProps !== undefined && excludeProps.filter(name => name === fieldName).length !== 0) {
                        continue;
                    }
                }
                defaultFetcherProps.push(fieldName);
            }

            if (field.args.length !== 0) {
                fieldArgsMap.set(fieldName, field.args);
            }

            const fieldCoreType = 
                field.type instanceof GraphQLNonNull ?
                field.type.ofType :
                field.type;
            if (this.ctx.embeddedTypes.has(fieldCoreType)) {
                fieldCategoryMap.set(fieldName, "SCALAR");
            } else if (this.ctx.connections.has(fieldCoreType)) {
                fieldCategoryMap.set(fieldName, "CONNECTION");
            } else if (fieldCoreType instanceof GraphQLList) {
                const elementType = 
                    fieldCoreType.ofType instanceof GraphQLNonNull ?
                    fieldCoreType.ofType.ofType :
                    fieldCoreType.ofType;
                if (elementType instanceof GraphQLObjectType ||
                    elementType instanceof GraphQLInterfaceType ||
                    elementType instanceof GraphQLUnionType
                ) {
                    fieldCategoryMap.set(fieldName, "LIST");
                }
            } else if (fieldCoreType instanceof GraphQLObjectType ||
                fieldCoreType instanceof GraphQLInterfaceType ||
                fieldCoreType instanceof GraphQLUnionType
            ) {
                fieldCategoryMap.set(fieldName, "REFERENCE");
            } else if (this.ctx.idFieldMap.get(this.modelType) === field) {
                fieldCategoryMap.set(fieldName, "ID");
            } else {
                fieldCategoryMap.set(fieldName, "SCALAR");
            }

            if (field.args.length !== 0) {
                this.hasArgs = true;
            }
        }

        this.defaultFetcherProps = defaultFetcherProps;
        this.fieldArgsMap = fieldArgsMap;
        this.fieldCategoryMap = fieldCategoryMap;
        let prefix = instancePrefix(this.modelType.name);
        this.emptyFetcherName = `${prefix}$`;
        this.defaultFetcherName = defaultFetcherProps.length !== 0 ? `${prefix}$$` : undefined;
    }
Example #19
Source File: index.ts    From Dimensions with MIT License    4 votes
/**
   * Run a custom match. A custom match must print to stdout all relevant data to be used by the engine and
   * the Dimensions framework. All output after the conclude command from {@link Design.OverrideOptions} is
   * stored as a list of newline-delimited strings and returned as the match results. The match must exit with
   * exit code 0 to be marked as successfully complete, at which point result processing stops and this function resolves.
   * @param match - the match to run
   */
  public runCustom(match: Match): Promise<Array<string>> {
    return new Promise((resolve, reject) => {
      if (this.overrideOptions.active == false) {
        reject(
          new FatalError(
            'Override was not set active! Make sure to set the overide.active field to true'
          )
        );
      }
      const cmd = this.overrideOptions.command;

      const parsed = this.parseCustomArguments(
        match,
        this.overrideOptions.arguments
      );

      // spawn the match process with the parsed arguments
      let matchProcessTimer: any;

      // TODO: configure some kind of secureMode for custom matches

      match.matchProcess = spawn(cmd, parsed).on('error', (err) => {
        if (err) throw err;
      });
      this.log.system(
        `${match.name} | id: ${match.id} - spawned: ${cmd} ${parsed.join(' ')}`
      );

      const errorLogFilepath = path.join(
        match.getMatchErrorLogDirectory(),
        `match_error.log`
      );
      let errorLogWriteStream: WriteStream = null;

      if (match.configs.storeErrorLogs) {
        errorLogWriteStream = fs.createWriteStream(errorLogFilepath);
      }

      // pipe stderr of match process to error log file if enabled
      if (match.configs.storeErrorLogs) {
        errorLogWriteStream.write('=== Custom Match Error Log ===\n');
        match.matchProcess.stderr.pipe(errorLogWriteStream);
      }

      let matchTimedOut = false;
      // set up timer if specified
      if (this.overrideOptions.timeout !== null) {
        matchProcessTimer = setTimeout(() => {
          this.log.system(`${match.name} | id: ${match.id} - Timed out`);
          match.matchProcess.kill('SIGKILL');
          matchTimedOut = true;
        }, this.overrideOptions.timeout);
      }

      let processingStage = false;
      match.matchProcess.stdout.on('readable', () => {
        let data: string[];
        while ((data = match.matchProcess.stdout.read())) {
          // split chunks into line by line and handle each line of output
          const strs = `${data}`.split(/\r?\n/);
          for (let i = 0; i < strs.length; i++) {
            const str = strs[i];

            // skip empties
            if (str === '') continue;

            // if we reached conclude command, default being D_MATCH_FINISHED, we start the processing stage
            if (str === this.overrideOptions.conclude_command) {
              processingStage = true;
            }
            // else if we aren't in the processing stage
            else if (!processingStage) {
              // store all stdout
              match.state.matchOutput.push(str);
            }
            // otherwise we are in processing stage
            else {
              // store into results
              match.results.push(str);
            }
          }
        }
      });

      match.matchProcess.stdout.on('close', (code) => {
        this.log.system(
          `${match.name} | id: ${match.id} - exited with code ${code}`
        );
        if (matchTimedOut) {
          reject(new MatchError('Match timed out'));
        } else {
          clearTimeout(matchProcessTimer);
          resolve(match.results);
        }
        // remove the agent files if on secureMode and double check it is the temporary directory
        match.agents.forEach((agent) => {
          if (agent.options.secureMode) {
            const tmpdir = os.tmpdir();
            if (agent.cwd.slice(0, tmpdir.length) === tmpdir) {
              removeDirectorySync(agent.cwd);
            } else {
              this.log.error(
                "couldn't remove agent files while in secure mode"
              );
            }
          }
        });
      });
    });
  }