12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448144914501451145214531454145514561457145814591460146114621463146414651466146714681469147014711472147314741475147614771478147914801481148214831484148514861487148814891490149114921493149414951496149714981499150015011502150315041505150615071508150915101511151215131514151515161517151815191520152115221523152415251526152715281529153015311532153315341535153615371538153915401541154215431544154515461547154815491550155115521553155415551556155715581559156015611562156315641565156615671568156915701571157215731574157515761577157815791580158115821583 |
- // @ts-check
- const fs = require('fs');
- const http = require('http');
- const path = require('path');
- const url = require('url');
- const cors = require('cors');
- const dotenv = require('dotenv');
- const express = require('express');
- const { Redis } = require('ioredis');
- const { JSDOM } = require('jsdom');
- const pg = require('pg');
- const dbUrlToConfig = require('pg-connection-string').parse;
- const WebSocket = require('ws');
- const { logger, httpLogger, initializeLogLevel, attachWebsocketHttpLogger, createWebsocketLogger } = require('./logging');
- const { setupMetrics } = require('./metrics');
- const { isTruthy, normalizeHashtag, firstParam } = require("./utils");
// The runtime environment, used below to pick the dotenv file and the
// per-environment database defaults:
const environment = process.env.NODE_ENV || 'development';

// Correctly detect and load .env or .env.production file based on environment:
const dotenvFile = environment === 'production' ? '.env.production' : '.env';

// NOTE: dotenv must populate process.env before initializeLogLevel reads it.
dotenv.config({
  path: path.resolve(__dirname, path.join('..', dotenvFile))
});

initializeLogLevel(process.env, environment);
- /**
- * Declares the result type for accountFromToken / accountFromRequest.
- *
- * Note: This is here because jsdoc doesn't like importing types that
- * are nested in functions
- * @typedef ResolvedAccount
- * @property {string} accessTokenId
- * @property {string[]} scopes
- * @property {string} accountId
- * @property {string[]} chosenLanguages
- * @property {string} deviceId
- */
/**
 * Creates an ioredis client, connecting via the URL when one is given and via
 * the discrete connection options otherwise. Connection errors are logged
 * instead of thrown so a transient failure does not crash the process.
 * @param {RedisConfiguration} config
 * @returns {Promise<Redis>}
 */
const createRedisClient = async ({ redisParams, redisUrl }) => {
  const client = typeof redisUrl === 'string'
    ? new Redis(redisUrl, redisParams)
    : new Redis(redisParams);

  client.on('error', (err) => logger.error({ err }, 'Redis Client Error!'));

  return client;
};
/**
 * Attempts to safely parse a string as JSON, used when both receiving a message
 * from redis and when receiving a message from a client over a websocket
 * connection, this is why it accepts a `req` argument.
 * @param {string} json
 * @param {any?} req
 * @returns {Object.<string, any>|null} the parsed value, or null on failure
 */
const parseJSON = (json, req) => {
  try {
    return JSON.parse(json);
  } catch (err) {
    /* FIXME: This logging isn't great, and should probably be done at the
     * call-site of parseJSON, not in the method, but this would require changing
     * the signature of parseJSON to return something akin to a Result type:
     * [Error|null, null|Object<string,any}], and then handling the error
     * scenarios.
     */
    if (!req) {
      // Messages without a request came off the redis subscription:
      logger.error({ err }, `Error parsing message from redis`);
      return null;
    }

    if (req.accountId) {
      req.log.error({ err }, `Error parsing message from user ${req.accountId}`);
    } else {
      req.log.error({ err }, `Error parsing message from ${req.remoteAddress}`);
    }

    return null;
  }
};
/**
 * Takes an environment variable that should be an integer, attempts to parse
 * it falling back to a default if not set, and handles errors parsing.
 * @param {string|undefined} value raw environment variable value
 * @param {number} defaultValue used when the variable is unset or empty
 * @param {string} variableName included in the error message on bad input
 * @returns {number}
 * @throws {Error} when the value is set but cannot be parsed as an integer
 */
const parseIntFromEnv = (value, defaultValue, variableName) => {
  // Unset or empty variables fall back to the default:
  if (typeof value !== 'string' || value.length === 0) {
    return defaultValue;
  }

  const parsedValue = parseInt(value, 10);

  // Number.isNaN avoids the coercing behaviour of the global isNaN:
  if (Number.isNaN(parsedValue)) {
    throw new Error(`Invalid ${variableName} environment variable: ${value}`);
  }

  return parsedValue;
};
/**
 * Builds the node-postgres pool configuration, either from DATABASE_URL or
 * from per-environment defaults overridden by the DB_* variables.
 * @param {NodeJS.ProcessEnv} env the `process.env` value to read configuration from
 * @returns {pg.PoolConfig} the configuration for the PostgreSQL connection
 */
const pgConfigFromEnv = (env) => {
  // Per-environment defaults, used only when DATABASE_URL is not set:
  /** @type {Record<string, pg.PoolConfig>} */
  const pgConfigs = {
    development: {
      user: env.DB_USER || pg.defaults.user,
      password: env.DB_PASS || pg.defaults.password,
      database: env.DB_NAME || 'mastodon_development',
      host: env.DB_HOST || pg.defaults.host,
      port: parseIntFromEnv(env.DB_PORT, pg.defaults.port ?? 5432, 'DB_PORT')
    },

    production: {
      user: env.DB_USER || 'mastodon',
      password: env.DB_PASS || '',
      database: env.DB_NAME || 'mastodon_production',
      host: env.DB_HOST || 'localhost',
      port: parseIntFromEnv(env.DB_PORT, 5432, 'DB_PORT')
    },
  };

  /**
   * @type {pg.PoolConfig}
   */
  let baseConfig = {};

  if (env.DATABASE_URL) {
    const parsedUrl = dbUrlToConfig(env.DATABASE_URL);

    // The result of dbUrlToConfig from pg-connection-string is not type
    // compatible with pg.PoolConfig, since parts of the connection URL may be
    // `null` when pg.PoolConfig expects `undefined`, as such we have to
    // manually create the baseConfig object from the properties of the
    // parsedUrl.
    //
    // For more information see:
    // https://github.com/brianc/node-postgres/issues/2280
    //
    // FIXME: clean up once brianc/node-postgres#3128 lands
    if (typeof parsedUrl.password === 'string') baseConfig.password = parsedUrl.password;
    if (typeof parsedUrl.host === 'string') baseConfig.host = parsedUrl.host;
    if (typeof parsedUrl.user === 'string') baseConfig.user = parsedUrl.user;
    if (typeof parsedUrl.port === 'string') {
      const parsedPort = parseInt(parsedUrl.port, 10);
      if (isNaN(parsedPort)) {
        throw new Error('Invalid port specified in DATABASE_URL environment variable');
      }
      baseConfig.port = parsedPort;
    }
    if (typeof parsedUrl.database === 'string') baseConfig.database = parsedUrl.database;
    if (typeof parsedUrl.options === 'string') baseConfig.options = parsedUrl.options;

    // The pg-connection-string type definition isn't correct, as parsedUrl.ssl
    // can absolutely be an Object, this is to work around these incorrect
    // types, including the casting of parsedUrl.ssl to Record<string, any>
    if (typeof parsedUrl.ssl === 'boolean') {
      baseConfig.ssl = parsedUrl.ssl;
    } else if (typeof parsedUrl.ssl === 'object' && !Array.isArray(parsedUrl.ssl) && parsedUrl.ssl !== null) {
      /** @type {Record<string, any>} */
      const sslOptions = parsedUrl.ssl;
      baseConfig.ssl = {};

      // Copy only the known ssl option fields, dropping anything else:
      baseConfig.ssl.cert = sslOptions.cert;
      baseConfig.ssl.key = sslOptions.key;
      baseConfig.ssl.ca = sslOptions.ca;
      baseConfig.ssl.rejectUnauthorized = sslOptions.rejectUnauthorized;
    }

    // Support overriding the database password in the connection URL
    if (!baseConfig.password && env.DB_PASS) {
      baseConfig.password = env.DB_PASS;
    }
  } else if (Object.hasOwnProperty.call(pgConfigs, environment)) {
    baseConfig = pgConfigs[environment];

    // DB_SSLMODE only applies to the per-environment defaults; when
    // DATABASE_URL is used the ssl settings come from the URL itself:
    if (env.DB_SSLMODE) {
      switch(env.DB_SSLMODE) {
        case 'disable':
        case '':
          baseConfig.ssl = false;
          break;
        case 'no-verify':
          baseConfig.ssl = { rejectUnauthorized: false };
          break;
        default:
          baseConfig.ssl = {};
          break;
      }
    }
  } else {
    throw new Error('Unable to resolve postgresql database configuration.');
  }

  return {
    ...baseConfig,
    max: parseIntFromEnv(env.DB_POOL, 10, 'DB_POOL'),
    connectionTimeoutMillis: 15000,
    // Deliberately set application_name to an empty string to prevent excessive
    // CPU usage with PG Bouncer. See:
    // - https://github.com/mastodon/mastodon/pull/23958
    // - https://github.com/pgbouncer/pgbouncer/issues/349
    application_name: '',
  };
};
/**
 * @typedef RedisConfiguration
 * @property {import('ioredis').RedisOptions} redisParams
 * @property {string} redisPrefix
 * @property {string|undefined} redisUrl
 */

/**
 * Reads the Redis connection configuration from the environment.
 * @param {NodeJS.ProcessEnv} env the `process.env` value to read configuration from
 * @returns {RedisConfiguration} configuration for the Redis connection
 */
const redisConfigFromEnv = (env) => {
  // ioredis *can* transparently add prefixes for us, but it doesn't *in some cases*,
  // which means we can't use it. But this is something that should be looked into.
  const redisPrefix = env.REDIS_NAMESPACE ? `${env.REDIS_NAMESPACE}:` : '';

  // Neither value is ever reassigned, so declare them const (was `let`):
  const redisPort = parseIntFromEnv(env.REDIS_PORT, 6379, 'REDIS_PORT');
  const redisDatabase = parseIntFromEnv(env.REDIS_DB, 0, 'REDIS_DB');

  /** @type {import('ioredis').RedisOptions} */
  const redisParams = {
    host: env.REDIS_HOST || '127.0.0.1',
    port: redisPort,
    db: redisDatabase,
    password: env.REDIS_PASSWORD || undefined,
  };

  // redisParams.path takes precedence over host and port.
  if (env.REDIS_URL && env.REDIS_URL.startsWith('unix://')) {
    redisParams.path = env.REDIS_URL.slice(7);
  }

  return {
    redisParams,
    redisPrefix,
    redisUrl: typeof env.REDIS_URL === 'string' ? env.REDIS_URL : undefined,
  };
};
// Channels for which no OAuth scopes are required (see checkScopes, which
// resolves immediately for these):
const PUBLIC_CHANNELS = [
  'public',
  'public:media',
  'public:local',
  'public:local:media',
  'public:remote',
  'public:remote:media',
  'hashtag',
  'hashtag:local',
];

// Used for priming the counters/gauges for the various metrics that are
// per-channel
const CHANNEL_NAMES = [
  'system',
  'user',
  'user:notification',
  'list',
  'direct',
  ...PUBLIC_CHANNELS
];
const startServer = async () => {
  // Connection pool shared by all request handlers defined below:
  const pgPool = new pg.Pool(pgConfigFromEnv(process.env));
  const server = http.createServer();

  // noServer: the upgrade is performed manually in the 'upgrade' handler
  // below, so authentication can run before the handshake completes:
  const wss = new WebSocket.Server({ noServer: true });

  // Set the X-Request-Id header on WebSockets:
  wss.on("headers", function onHeaders(headers, req) {
    headers.push(`X-Request-Id: ${req.id}`);
  });

  const app = express();

  // Trust either the configured proxy addresses or the local defaults, per
  // Express's `trust proxy` setting:
  app.set('trust proxy', process.env.TRUSTED_PROXY_IP ? process.env.TRUSTED_PROXY_IP.split(/(?:\s*,\s*|\s+)/) : 'loopback,uniquelocal');

  app.use(httpLogger);
  app.use(cors());

  // Handle eventsource & other http requests:
  server.on('request', app);
  // Handle upgrade requests:
  server.on('upgrade', async function handleUpgrade(request, socket, head) {
    // Setup the HTTP logger, since websocket upgrades don't get the usual http
    // logger. This decorates the `request` object.
    attachWebsocketHttpLogger(request);

    request.log.info("HTTP Upgrade Requested");

    /** @param {Error} err */
    const onSocketError = (err) => {
      request.log.error({ error: err }, err.message);
    };

    socket.on('error', onSocketError);

    // Authenticate BEFORE completing the WebSocket handshake:
    /** @type {ResolvedAccount} */
    let resolvedAccount;

    try {
      resolvedAccount = await accountFromRequest(request);
    } catch (err) {
      // Unfortunately for using the on('upgrade') setup, we need to manually
      // write a HTTP Response to the Socket to close the connection upgrade
      // attempt, so the following code is to handle all of that.
      const statusCode = err.status ?? 401;

      /** @type {Record<string, string | number | import('pino-http').ReqId>} */
      const headers = {
        'Connection': 'close',
        'Content-Type': 'text/plain',
        'Content-Length': 0,
        'X-Request-Id': request.id,
        // Only errors that set an explicit status expose their message:
        'X-Error-Message': err.status ? err.toString() : 'An unexpected error occurred'
      };

      // Ensure the socket is closed once we've finished writing to it:
      socket.once('finish', () => {
        socket.destroy();
      });

      // Write the HTTP response manually:
      socket.end(`HTTP/1.1 ${statusCode} ${http.STATUS_CODES[statusCode]}\r\n${Object.keys(headers).map((key) => `${key}: ${headers[key]}`).join('\r\n')}\r\n\r\n`);

      // Finally, log the error:
      request.log.error({
        err,
        res: {
          statusCode,
          headers
        }
      }, err.toString());

      return;
    }

    // Remove the error handler, wss.handleUpgrade has its own:
    socket.removeListener('error', onSocketError);

    wss.handleUpgrade(request, socket, head, function done(ws) {
      request.log.info("Authenticated request & upgraded to WebSocket connection");

      const wsLogger = createWebsocketLogger(request, resolvedAccount);

      // Start the connection:
      wss.emit('connection', ws, request, wsLogger);
    });
  });
  /**
   * Maps Redis channel names to the callbacks subscribed to them; used by
   * subscribe/unsubscribe/onRedisMessage below to fan out messages.
   * @type {Object.<string, Array.<function(Object<string, any>): void>>}
   */
  const subs = {};

  const redisConfig = redisConfigFromEnv(process.env);
  // Two clients: one is dedicated to subscriptions (see onRedisMessage), the
  // other issues regular commands (see subscriptionHeartbeat):
  const redisSubscribeClient = await createRedisClient(redisConfig);
  const redisClient = await createRedisClient(redisConfig);
  const { redisPrefix } = redisConfig;

  const metrics = setupMetrics(CHANNEL_NAMES, pgPool);
  // TODO: migrate all metrics to metrics.X.method() instead of just X.method()
  const {
    connectedClients,
    connectedChannels,
    redisSubscriptions,
    redisMessagesReceived,
    messagesSent,
  } = metrics;

  // When checking metrics in the browser, the favicon is requested this
  // prevents the request from falling through to the API Router, which would
  // error for this endpoint:
  app.get('/favicon.ico', (_req, res) => res.status(404).end());

  // Simple liveness probe:
  app.get('/api/v1/streaming/health', (_req, res) => {
    res.writeHead(200, { 'Content-Type': 'text/plain' });
    res.end('OK');
  });

  // Exposes the metrics registry (see ./metrics):
  app.get('/metrics', async (req, res) => {
    try {
      res.set('Content-Type', metrics.register.contentType);
      res.end(await metrics.register.metrics());
    } catch (ex) {
      req.log.error(ex);
      res.status(500).end();
    }
  });
- /**
- * @param {string[]} channels
- * @returns {function(): void}
- */
- const subscriptionHeartbeat = channels => {
- const interval = 6 * 60;
- const tellSubscribed = () => {
- channels.forEach(channel => redisClient.set(`${redisPrefix}subscribed:${channel}`, '1', 'EX', interval * 3));
- };
- tellSubscribed();
- const heartbeat = setInterval(tellSubscribed, interval * 1000);
- return () => {
- clearInterval(heartbeat);
- };
- };
- /**
- * @param {string} channel
- * @param {string} message
- */
- const onRedisMessage = (channel, message) => {
- redisMessagesReceived.inc();
- const callbacks = subs[channel];
- logger.debug(`New message on channel ${redisPrefix}${channel}`);
- if (!callbacks) {
- return;
- }
- const json = parseJSON(message, null);
- if (!json) return;
- callbacks.forEach(callback => callback(json));
- };
- redisSubscribeClient.on("message", onRedisMessage);
- /**
- * @callback SubscriptionListener
- * @param {ReturnType<parseJSON>} json of the message
- * @returns void
- */
- /**
- * @param {string} channel
- * @param {SubscriptionListener} callback
- */
- const subscribe = (channel, callback) => {
- logger.debug(`Adding listener for ${channel}`);
- subs[channel] = subs[channel] || [];
- if (subs[channel].length === 0) {
- logger.debug(`Subscribe ${channel}`);
- redisSubscribeClient.subscribe(channel, (err, count) => {
- if (err) {
- logger.error(`Error subscribing to ${channel}`);
- } else if (typeof count === 'number') {
- redisSubscriptions.set(count);
- }
- });
- }
- subs[channel].push(callback);
- };
- /**
- * @param {string} channel
- * @param {SubscriptionListener} callback
- */
- const unsubscribe = (channel, callback) => {
- logger.debug(`Removing listener for ${channel}`);
- if (!subs[channel]) {
- return;
- }
- subs[channel] = subs[channel].filter(item => item !== callback);
- if (subs[channel].length === 0) {
- logger.debug(`Unsubscribe ${channel}`);
- redisSubscribeClient.unsubscribe(channel, (err, count) => {
- if (err) {
- logger.error(`Error unsubscribing to ${channel}`);
- } else if (typeof count === 'number') {
- redisSubscriptions.set(count);
- }
- });
- delete subs[channel];
- }
- };
- /**
- * @param {http.IncomingMessage & ResolvedAccount} req
- * @param {string[]} necessaryScopes
- * @returns {boolean}
- */
- const isInScope = (req, necessaryScopes) =>
- req.scopes.some(scope => necessaryScopes.includes(scope));
  /**
   * Looks up the account that owns the given OAuth access token. As a side
   * effect, decorates `req` with the resolved identifiers (the same values
   * returned in the resolved object).
   * @param {string} token
   * @param {any} req
   * @returns {Promise<ResolvedAccount>} rejects with status 401 for an
   *   unknown/revoked token, or with the underlying pg error
   */
  const accountFromToken = (token, req) => new Promise((resolve, reject) => {
    pgPool.connect((err, client, done) => {
      if (err) {
        reject(err);
        return;
      }

      // @ts-ignore
      client.query('SELECT oauth_access_tokens.id, oauth_access_tokens.resource_owner_id, users.account_id, users.chosen_languages, oauth_access_tokens.scopes, devices.device_id FROM oauth_access_tokens INNER JOIN users ON oauth_access_tokens.resource_owner_id = users.id LEFT OUTER JOIN devices ON oauth_access_tokens.id = devices.access_token_id WHERE oauth_access_tokens.token = $1 AND oauth_access_tokens.revoked_at IS NULL LIMIT 1', [token], (err, result) => {
        // Release the pooled connection before inspecting the result:
        done();

        if (err) {
          reject(err);
          return;
        }

        // Zero rows means the token is unknown or revoked:
        if (result.rows.length === 0) {
          err = new Error('Invalid access token');
          // @ts-ignore
          err.status = 401;

          reject(err);
          return;
        }

        // Mutate the request so later middleware/handlers can read the
        // resolved account without re-querying:
        req.accessTokenId = result.rows[0].id;
        req.scopes = result.rows[0].scopes.split(' ');
        req.accountId = result.rows[0].account_id;
        req.chosenLanguages = result.rows[0].chosen_languages;
        req.deviceId = result.rows[0].device_id;

        resolve({
          accessTokenId: result.rows[0].id,
          scopes: result.rows[0].scopes.split(' '),
          accountId: result.rows[0].account_id,
          chosenLanguages: result.rows[0].chosen_languages,
          deviceId: result.rows[0].device_id
        });
      });
    });
  });
- /**
- * @param {any} req
- * @returns {Promise<ResolvedAccount>}
- */
- const accountFromRequest = (req) => new Promise((resolve, reject) => {
- const authorization = req.headers.authorization;
- const location = url.parse(req.url, true);
- const accessToken = location.query.access_token || req.headers['sec-websocket-protocol'];
- if (!authorization && !accessToken) {
- const err = new Error('Missing access token');
- // @ts-ignore
- err.status = 401;
- reject(err);
- return;
- }
- const token = authorization ? authorization.replace(/^Bearer /, '') : accessToken;
- resolve(accountFromToken(token, req));
- });
- /**
- * @param {any} req
- * @returns {string|undefined}
- */
- const channelNameFromPath = req => {
- const { path, query } = req;
- const onlyMedia = isTruthy(query.only_media);
- switch (path) {
- case '/api/v1/streaming/user':
- return 'user';
- case '/api/v1/streaming/user/notification':
- return 'user:notification';
- case '/api/v1/streaming/public':
- return onlyMedia ? 'public:media' : 'public';
- case '/api/v1/streaming/public/local':
- return onlyMedia ? 'public:local:media' : 'public:local';
- case '/api/v1/streaming/public/remote':
- return onlyMedia ? 'public:remote:media' : 'public:remote';
- case '/api/v1/streaming/hashtag':
- return 'hashtag';
- case '/api/v1/streaming/hashtag/local':
- return 'hashtag:local';
- case '/api/v1/streaming/direct':
- return 'direct';
- case '/api/v1/streaming/list':
- return 'list';
- default:
- return undefined;
- }
- };
- /**
- * @param {http.IncomingMessage & ResolvedAccount} req
- * @param {import('pino').Logger} logger
- * @param {string|undefined} channelName
- * @returns {Promise.<void>}
- */
- const checkScopes = (req, logger, channelName) => new Promise((resolve, reject) => {
- logger.debug(`Checking OAuth scopes for ${channelName}`);
- // When accessing public channels, no scopes are needed
- if (channelName && PUBLIC_CHANNELS.includes(channelName)) {
- resolve();
- return;
- }
- // The `read` scope has the highest priority, if the token has it
- // then it can access all streams
- const requiredScopes = ['read'];
- // When accessing specifically the notifications stream,
- // we need a read:notifications, while in all other cases,
- // we can allow access with read:statuses. Mind that the
- // user stream will not contain notifications unless
- // the token has either read or read:notifications scope
- // as well, this is handled separately.
- if (channelName === 'user:notification') {
- requiredScopes.push('read:notifications');
- } else {
- requiredScopes.push('read:statuses');
- }
- if (req.scopes && requiredScopes.some(requiredScope => req.scopes.includes(requiredScope))) {
- resolve();
- return;
- }
- const err = new Error('Access token does not cover required scopes');
- // @ts-ignore
- err.status = 401;
- reject(err);
- });
- /**
- * @typedef SystemMessageHandlers
- * @property {function(): void} onKill
- */
- /**
- * @param {any} req
- * @param {SystemMessageHandlers} eventHandlers
- * @returns {SubscriptionListener}
- */
- const createSystemMessageListener = (req, eventHandlers) => {
- return message => {
- if (!message?.event) {
- return;
- }
- const { event } = message;
- req.log.debug(`System message for ${req.accountId}: ${event}`);
- if (event === 'kill') {
- req.log.debug(`Closing connection for ${req.accountId} due to expired access token`);
- eventHandlers.onKill();
- } else if (event === 'filters_changed') {
- req.log.debug(`Invalidating filters cache for ${req.accountId}`);
- req.cachedFilters = null;
- }
- };
- };
- /**
- * @param {http.IncomingMessage & ResolvedAccount} req
- * @param {http.OutgoingMessage} res
- */
- const subscribeHttpToSystemChannel = (req, res) => {
- const accessTokenChannelId = `timeline:access_token:${req.accessTokenId}`;
- const systemChannelId = `timeline:system:${req.accountId}`;
- const listener = createSystemMessageListener(req, {
- onKill() {
- res.end();
- },
- });
- res.on('close', () => {
- unsubscribe(`${redisPrefix}${accessTokenChannelId}`, listener);
- unsubscribe(`${redisPrefix}${systemChannelId}`, listener);
- connectedChannels.labels({ type: 'eventsource', channel: 'system' }).dec(2);
- });
- subscribe(`${redisPrefix}${accessTokenChannelId}`, listener);
- subscribe(`${redisPrefix}${systemChannelId}`, listener);
- connectedChannels.labels({ type: 'eventsource', channel: 'system' }).inc(2);
- };
- /**
- * @param {any} req
- * @param {any} res
- * @param {function(Error=): void} next
- */
- const authenticationMiddleware = (req, res, next) => {
- if (req.method === 'OPTIONS') {
- next();
- return;
- }
- const channelName = channelNameFromPath(req);
- // If no channelName can be found for the request, then we should terminate
- // the connection, as there's nothing to stream back
- if (!channelName) {
- const err = new Error('Unknown channel requested');
- // @ts-ignore
- err.status = 400;
- next(err);
- return;
- }
- accountFromRequest(req).then(() => checkScopes(req, req.log, channelName)).then(() => {
- subscribeHttpToSystemChannel(req, res);
- }).then(() => {
- next();
- }).catch(err => {
- next(err);
- });
- };
- /**
- * @param {Error} err
- * @param {any} req
- * @param {any} res
- * @param {function(Error=): void} next
- */
- const errorMiddleware = (err, req, res, next) => {
- req.log.error({ err }, err.toString());
- if (res.headersSent) {
- next(err);
- return;
- }
- const hasStatusCode = Object.hasOwnProperty.call(err, 'status');
- // @ts-ignore
- const statusCode = hasStatusCode ? err.status : 500;
- const errorMessage = hasStatusCode ? err.toString() : 'An unexpected error occurred';
- res.writeHead(statusCode, { 'Content-Type': 'application/json' });
- res.end(JSON.stringify({ error: errorMessage }));
- };
- /**
- * @param {any[]} arr
- * @param {number=} shift
- * @returns {string}
- */
- // @ts-ignore
- const placeholders = (arr, shift = 0) => arr.map((_, i) => `$${i + 1 + shift}`).join(', ');
  /**
   * Checks that the authenticated account owns the given list.
   * @param {string} listId
   * @param {any} req
   * @returns {Promise.<void>} resolves when access is allowed, rejects otherwise
   */
  const authorizeListAccess = (listId, req) => new Promise((resolve, reject) => {
    const { accountId } = req;

    pgPool.connect((err, client, done) => {
      if (err) {
        // NOTE(review): rejects with no value, so callers cannot distinguish a
        // connection failure from an authorization failure — confirm unseen
        // call sites before changing this to reject(err).
        reject();
        return;
      }

      // @ts-ignore
      client.query('SELECT id, account_id FROM lists WHERE id = $1 LIMIT 1', [listId], (err, result) => {
        // Release the pooled connection before inspecting the result:
        done();

        // Reject when the query failed, the list does not exist, or it is
        // owned by a different account:
        if (err || result.rows.length === 0 || result.rows[0].account_id !== accountId) {
          reject();
          return;
        }

        resolve();
      });
    });
  });
- /**
- * @param {string[]} channelIds
- * @param {http.IncomingMessage & ResolvedAccount} req
- * @param {import('pino').Logger} log
- * @param {function(string, string): void} output
- * @param {undefined | function(string[], SubscriptionListener): void} attachCloseHandler
- * @param {'websocket' | 'eventsource'} destinationType
- * @param {boolean=} needsFiltering
- * @returns {SubscriptionListener}
- */
- const streamFrom = (channelIds, req, log, output, attachCloseHandler, destinationType, needsFiltering = false) => {
- log.info({ channelIds }, `Starting stream`);
- /**
- * @param {string} event
- * @param {object|string} payload
- */
- const transmit = (event, payload) => {
- // TODO: Replace "string"-based delete payloads with object payloads:
- const encodedPayload = typeof payload === 'object' ? JSON.stringify(payload) : payload;
- messagesSent.labels({ type: destinationType }).inc(1);
- log.debug({ event, payload }, `Transmitting ${event} to ${req.accountId}`);
- output(event, encodedPayload);
- };
- // The listener used to process each message off the redis subscription,
- // message here is an object with an `event` and `payload` property. Some
- // events also include a queued_at value, but this is being removed shortly.
- /** @type {SubscriptionListener} */
- const listener = message => {
- if (!message?.event || !message?.payload) {
- return;
- }
- const { event, payload } = message;
- // Streaming only needs to apply filtering to some channels and only to
- // some events. This is because majority of the filtering happens on the
- // Ruby on Rails side when producing the event for streaming.
- //
- // The only events that require filtering from the streaming server are
- // `update` and `status.update`, all other events are transmitted to the
- // client as soon as they're received (pass-through).
- //
- // The channels that need filtering are determined in the function
- // `channelNameToIds` defined below:
- if (!needsFiltering || (event !== 'update' && event !== 'status.update')) {
- transmit(event, payload);
- return;
- }
- // The rest of the logic from here on in this function is to handle
- // filtering of statuses:
- // Filter based on language:
- if (Array.isArray(req.chosenLanguages) && payload.language !== null && req.chosenLanguages.indexOf(payload.language) === -1) {
- log.debug(`Message ${payload.id} filtered by language (${payload.language})`);
- return;
- }
- // When the account is not logged in, it is not necessary to confirm the block or mute
- if (!req.accountId) {
- transmit(event, payload);
- return;
- }
- // Filter based on domain blocks, blocks, mutes, or custom filters:
- // @ts-ignore
- const targetAccountIds = [payload.account.id].concat(payload.mentions.map(item => item.id));
- const accountDomain = payload.account.acct.split('@')[1];
- // TODO: Move this logic out of the message handling loop
- pgPool.connect((err, client, releasePgConnection) => {
- if (err) {
- log.error(err);
- return;
- }
- const queries = [
- // @ts-ignore
- client.query(`SELECT 1
- FROM blocks
- WHERE (account_id = $1 AND target_account_id IN (${placeholders(targetAccountIds, 2)}))
- OR (account_id = $2 AND target_account_id = $1)
- UNION
- SELECT 1
- FROM mutes
- WHERE account_id = $1
- AND target_account_id IN (${placeholders(targetAccountIds, 2)})`, [req.accountId, payload.account.id].concat(targetAccountIds)),
- ];
- if (accountDomain) {
- // @ts-ignore
- queries.push(client.query('SELECT 1 FROM account_domain_blocks WHERE account_id = $1 AND domain = $2', [req.accountId, accountDomain]));
- }
- // @ts-ignore
- if (!payload.filtered && !req.cachedFilters) {
- // @ts-ignore
- queries.push(client.query('SELECT filter.id AS id, filter.phrase AS title, filter.context AS context, filter.expires_at AS expires_at, filter.action AS filter_action, keyword.keyword AS keyword, keyword.whole_word AS whole_word FROM custom_filter_keywords keyword JOIN custom_filters filter ON keyword.custom_filter_id = filter.id WHERE filter.account_id = $1 AND (filter.expires_at IS NULL OR filter.expires_at > NOW())', [req.accountId]));
- }
- Promise.all(queries).then(values => {
- releasePgConnection();
- // Handling blocks & mutes and domain blocks: If one of those applies,
- // then we don't transmit the payload of the event to the client
- if (values[0].rows.length > 0 || (accountDomain && values[1].rows.length > 0)) {
- return;
- }
- // If the payload already contains the `filtered` property, it means
- // that filtering has been applied on the ruby on rails side, as
- // such, we don't need to construct or apply the filters in streaming:
- if (Object.prototype.hasOwnProperty.call(payload, "filtered")) {
- transmit(event, payload);
- return;
- }
- // Handling for constructing the custom filters and caching them on the request
- // TODO: Move this logic out of the message handling lifecycle
- // @ts-ignore
- if (!req.cachedFilters) {
- const filterRows = values[accountDomain ? 2 : 1].rows;
- // @ts-ignore
- req.cachedFilters = filterRows.reduce((cache, filter) => {
- if (cache[filter.id]) {
- cache[filter.id].keywords.push([filter.keyword, filter.whole_word]);
- } else {
- cache[filter.id] = {
- keywords: [[filter.keyword, filter.whole_word]],
- expires_at: filter.expires_at,
- filter: {
- id: filter.id,
- title: filter.title,
- context: filter.context,
- expires_at: filter.expires_at,
- // filter.filter_action is the value from the
- // custom_filters.action database column, it is an integer
- // representing a value in an enum defined by Ruby on Rails:
- //
- // enum { warn: 0, hide: 1 }
- filter_action: ['warn', 'hide'][filter.filter_action],
- },
- };
- }
- return cache;
- }, {});
- // Construct the regular expressions for the custom filters: This
- // needs to be done in a separate loop as the database returns one
- // filterRow per keyword, so we need all the keywords before
- // constructing the regular expression
- // @ts-ignore
- Object.keys(req.cachedFilters).forEach((key) => {
- // @ts-ignore
- req.cachedFilters[key].regexp = new RegExp(req.cachedFilters[key].keywords.map(([keyword, whole_word]) => {
- let expr = keyword.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
- if (whole_word) {
- if (/^[\w]/.test(expr)) {
- expr = `\\b${expr}`;
- }
- if (/[\w]$/.test(expr)) {
- expr = `${expr}\\b`;
- }
- }
- return expr;
- }).join('|'), 'i');
- });
- }
- // Apply cachedFilters against the payload, constructing a
- // `filter_results` array of FilterResult entities
- // @ts-ignore
- if (req.cachedFilters) {
- const status = payload;
- // TODO: Calculate searchableContent in Ruby on Rails:
- // @ts-ignore
- const searchableContent = ([status.spoiler_text || '', status.content].concat((status.poll && status.poll.options) ? status.poll.options.map(option => option.title) : [])).concat(status.media_attachments.map(att => att.description)).join('\n\n').replace(/<br\s*\/?>/g, '\n').replace(/<\/p><p>/g, '\n\n');
- const searchableTextContent = JSDOM.fragment(searchableContent).textContent;
- const now = new Date();
- // @ts-ignore
- const filter_results = Object.values(req.cachedFilters).reduce((results, cachedFilter) => {
- // Check the filter hasn't expired before applying:
- if (cachedFilter.expires_at !== null && cachedFilter.expires_at < now) {
- return results;
- }
- // Just in-case JSDOM fails to find textContent in searchableContent
- if (!searchableTextContent) {
- return results;
- }
- const keyword_matches = searchableTextContent.match(cachedFilter.regexp);
- if (keyword_matches) {
- // results is an Array of FilterResult; status_matches is always
- // null as we only are only applying the keyword-based custom
- // filters, not the status-based custom filters.
- // https://docs.joinmastodon.org/entities/FilterResult/
- results.push({
- filter: cachedFilter.filter,
- keyword_matches,
- status_matches: null
- });
- }
- return results;
- }, []);
- // Send the payload + the FilterResults as the `filtered` property
- // to the streaming connection. To reach this code, the `event` must
- // have been either `update` or `status.update`, meaning the
- // `payload` is a Status entity, which has a `filtered` property:
- //
- // filtered: https://docs.joinmastodon.org/entities/Status/#filtered
- transmit(event, {
- ...payload,
- filtered: filter_results
- });
- } else {
- transmit(event, payload);
- }
- }).catch(err => {
- log.error(err);
- releasePgConnection();
- });
- });
- };
- channelIds.forEach(id => {
- subscribe(`${redisPrefix}${id}`, listener);
- });
- if (typeof attachCloseHandler === 'function') {
- attachCloseHandler(channelIds.map(id => `${redisPrefix}${id}`), listener);
- }
- return listener;
- };
/**
 * Wire an HTTP response up as a server-sent-events stream and return the
 * transmit callback used to push events to it.
 * @param {any} req
 * @param {any} res
 * @returns {function(string, string): void}
 */
const streamToHttp = (req, res) => {
  const channelName = channelNameFromPath(req);

  connectedClients.labels({ type: 'eventsource' }).inc();

  // In theory we'll always have a channel name, but channelNameFromPath can return undefined:
  if (typeof channelName === 'string') {
    connectedChannels.labels({ type: 'eventsource', channel: channelName }).inc();
  }

  const sseHeaders = {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-store',
    'Transfer-Encoding': 'chunked',
  };

  for (const [header, value] of Object.entries(sseHeaders)) {
    res.setHeader(header, value);
  }

  res.write(':)\n');

  // SSE comment lines (leading ":") are ignored by clients but keep the
  // connection from being timed out by intermediaries:
  const keepaliveTimer = setInterval(() => res.write(':thump\n'), 15000);

  req.on('close', () => {
    req.log.info({ accountId: req.accountId }, `Ending stream`);

    // We decrement these counters here instead of in streamHttpEnd as in that
    // method we don't have knowledge of the channel names
    connectedClients.labels({ type: 'eventsource' }).dec();
    // In theory we'll always have a channel name, but channelNameFromPath can return undefined:
    if (typeof channelName === 'string') {
      connectedChannels.labels({ type: 'eventsource', channel: channelName }).dec();
    }

    clearInterval(keepaliveTimer);
  });

  return (event, payload) => {
    res.write(`event: ${event}\n`);
    res.write(`data: ${payload}\n\n`);
  };
};
/**
 * Build the cleanup hook for an eventsource client: when the request closes,
 * unsubscribe the listener from every channel and run the optional handler.
 * @param {any} req
 * @param {function(): void} [closeHandler]
 * @returns {function(string[], SubscriptionListener): void}
 */
const streamHttpEnd = (req, closeHandler = undefined) => (ids, listener) => {
  req.on('close', () => {
    for (const id of ids) {
      unsubscribe(id, listener);
    }

    if (closeHandler) {
      closeHandler();
    }
  });
};
/**
 * Build the transmit callback that forwards stream events over a websocket.
 * @param {http.IncomingMessage} req
 * @param {WebSocket} ws
 * @param {string[]} streamName
 * @returns {function(string, string): void}
 */
const streamToWs = (req, ws, streamName) => {
  // Shared completion callback for ws.send; only logs on failure:
  const onSendComplete = (/** @type {Error|undefined} */ err) => {
    if (err) {
      req.log.error({ err }, `Failed to send to websocket`);
    }
  };

  return (event, payload) => {
    // Writing to a socket that is not OPEN would fail; drop and log instead:
    if (ws.readyState !== ws.OPEN) {
      req.log.error('Tried writing to closed socket');
      return;
    }

    ws.send(JSON.stringify({ stream: streamName, event, payload }), onSendComplete);
  };
};
/**
 * Finish an HTTP response with a JSON 404 body.
 * @param {any} res
 */
const httpNotFound = (res) => {
  const body = JSON.stringify({ error: 'Not found' });

  res.writeHead(404, { 'Content-Type': 'application/json' });
  res.end(body);
};
// Router carrying the streaming HTTP (eventsource) endpoints. Order matters:
// authentication runs before the error middleware and the route handlers.
const api = express.Router();

app.use(api);
api.use(authenticationMiddleware);
api.use(errorMiddleware);

api.get('/api/v1/streaming/*', async (req, res) => {
  try {
    // @ts-ignore
    const { channelIds, options } = await channelNameToIds(req, channelNameFromPath(req), req.query);

    const onSend = streamToHttp(req, res);
    const onEnd = streamHttpEnd(req, subscriptionHeartbeat(channelIds));

    // @ts-ignore
    streamFrom(channelIds, req, req.log, onSend, onEnd, 'eventsource', options.needsFiltering);
  } catch (err) {
    res.log.info({ err }, 'Subscription error:', err.toString());
    httpNotFound(res);
  }
});
/**
 * @typedef StreamParams
 * @property {string} [tag]
 * @property {string} [list]
 * @property {string} [only_media]
 */

/**
 * Resolve the Redis channels that make up a user's "user" stream: the home
 * timeline, plus optional device and notification channels gated on scopes.
 * @param {any} req
 * @returns {string[]}
 */
const channelsForUserStream = req => {
  const channelIds = [`timeline:${req.accountId}`];

  // Per-device channel requires the crypto scope and a device id:
  if (isInScope(req, ['crypto']) && req.deviceId) {
    channelIds.push(`timeline:${req.accountId}:${req.deviceId}`);
  }

  // Notifications require a read scope:
  if (isInScope(req, ['read', 'read:notifications'])) {
    channelIds.push(`timeline:${req.accountId}:notifications`);
  }

  return channelIds;
};
/**
 * Resolve a stream name (plus request params) into the concrete Redis
 * channel ids backing it and the per-stream options.
 * @param {any} req
 * @param {string} name
 * @param {StreamParams} params
 * @returns {Promise.<{ channelIds: string[], options: { needsFiltering: boolean } }>}
 */
const channelNameToIds = (req, name, params) => new Promise((resolve, reject) => {
  // Resolve with a channel list; needsFiltering tells the consumer whether
  // per-account filtering (blocks, mutes, custom filters) must be applied.
  const resolveChannels = (channelIds, needsFiltering) => {
    resolve({ channelIds, options: { needsFiltering } });
  };

  // The two hashtag streams share validation; suffix is '' or ':local'.
  const resolveHashtagChannel = (suffix) => {
    if (!params.tag || params.tag.length === 0) {
      reject('No tag for stream provided');
    } else {
      resolveChannels([`timeline:hashtag:${normalizeHashtag(params.tag)}${suffix}`], true);
    }
  };

  switch (name) {
  case 'user':
    resolveChannels(channelsForUserStream(req), false);
    break;
  case 'user:notification':
    resolveChannels([`timeline:${req.accountId}:notifications`], false);
    break;
  case 'public':
    resolveChannels(['timeline:public'], true);
    break;
  case 'public:local':
    resolveChannels(['timeline:public:local'], true);
    break;
  case 'public:remote':
    resolveChannels(['timeline:public:remote'], true);
    break;
  case 'public:media':
    resolveChannels(['timeline:public:media'], true);
    break;
  case 'public:local:media':
    resolveChannels(['timeline:public:local:media'], true);
    break;
  case 'public:remote:media':
    resolveChannels(['timeline:public:remote:media'], true);
    break;
  case 'direct':
    resolveChannels([`timeline:direct:${req.accountId}`], false);
    break;
  case 'hashtag':
    resolveHashtagChannel('');
    break;
  case 'hashtag:local':
    resolveHashtagChannel(':local');
    break;
  case 'list':
    // List streams require an authorization check before resolving:
    // @ts-ignore
    authorizeListAccess(params.list, req).then(() => {
      resolveChannels([`timeline:list:${params.list}`], false);
    }).catch(() => {
      reject('Not authorized to stream this list');
    });
    break;
  default:
    reject('Unknown stream type');
  }
});
/**
 * Build the stream identifier sent to websocket clients: the channel name,
 * plus the list id or hashtag when the channel is parameterized.
 * @param {string} channelName
 * @param {StreamParams} params
 * @returns {string[]}
 */
const streamNameFromChannelName = (channelName, params) => {
  if (channelName === 'list' && params.list) {
    return [channelName, params.list];
  }

  const isHashtagChannel = channelName === 'hashtag' || channelName === 'hashtag:local';
  if (isHashtagChannel && params.tag) {
    return [channelName, params.tag];
  }

  return [channelName];
};
/**
 * @typedef WebSocketSession
 * @property {WebSocket & { isAlive: boolean}} websocket
 * @property {http.IncomingMessage & ResolvedAccount} request
 * @property {import('pino').Logger} logger
 * @property {Object.<string, { channelName: string, listener: SubscriptionListener, stopHeartbeat: function(): void }>} subscriptions
 */

/**
 * Subscribe a websocket session to the Redis channels backing a stream,
 * recording the subscription on the session keyed by the joined channel ids.
 * @param {WebSocketSession} session
 * @param {string} channelName
 * @param {StreamParams} params
 * @returns {void}
 */
const subscribeWebsocketToChannel = ({ websocket, request, logger, subscriptions }, channelName, params) => {
  checkScopes(request, logger, channelName)
    .then(() => channelNameToIds(request, channelName, params))
    .then(({ channelIds, options }) => {
      const subscriptionKey = channelIds.join(';');

      // Already subscribed to this exact channel set; nothing to do:
      if (subscriptions[subscriptionKey]) {
        return;
      }

      const onSend = streamToWs(request, websocket, streamNameFromChannelName(channelName, params));
      const stopHeartbeat = subscriptionHeartbeat(channelIds);
      const listener = streamFrom(channelIds, request, logger, onSend, undefined, 'websocket', options.needsFiltering);

      connectedChannels.labels({ type: 'websocket', channel: channelName }).inc();

      subscriptions[subscriptionKey] = {
        channelName,
        listener,
        stopHeartbeat,
      };
    })
    .catch(err => {
      logger.error({ err }, 'Subscription error');
      websocket.send(JSON.stringify({ error: err.toString() }));
    });
};
/**
 * Drop a recorded subscription from a session: unsubscribe its listener from
 * every Redis channel, fix the metrics, and stop its heartbeat.
 * @param {WebSocketSession} session
 * @param {string[]} channelIds
 */
const removeSubscription = ({ request, logger, subscriptions }, channelIds) => {
  logger.info({ channelIds, accountId: request.accountId }, `Ending stream`);

  const subscriptionKey = channelIds.join(';');
  const subscription = subscriptions[subscriptionKey];

  // Nothing registered under this key (e.g. a duplicate unsubscribe):
  if (!subscription) {
    return;
  }

  for (const channelId of channelIds) {
    unsubscribe(`${redisPrefix}${channelId}`, subscription.listener);
  }

  connectedChannels.labels({ type: 'websocket', channel: subscription.channelName }).dec();
  subscription.stopHeartbeat();

  delete subscriptions[subscriptionKey];
};
/**
 * Tear down a single channel subscription for a websocket session.
 * @param {WebSocketSession} session
 * @param {string} channelName
 * @param {StreamParams} params
 * @returns {void}
 */
const unsubscribeWebsocketFromChannel = (session, channelName, params) => {
  const { websocket, request, logger } = session;

  channelNameToIds(request, channelName, params)
    .then(({ channelIds }) => {
      removeSubscription(session, channelIds);
    })
    .catch(err => {
      logger.error({ err }, 'Unsubscribe error');

      // If we have a socket that is alive and open still, send the error back to the client:
      const socketIsWritable = websocket.isAlive && websocket.readyState === websocket.OPEN;
      if (socketIsWritable) {
        websocket.send(JSON.stringify({ error: "Error unsubscribing from channel" }));
      }
    });
};
/**
 * Subscribe a websocket session to its two system channels (per access token
 * and per account); the listener closes the socket when told to kill it.
 * @param {WebSocketSession} session
 */
const subscribeWebsocketToSystemChannel = ({ websocket, request, subscriptions }) => {
  const listener = createSystemMessageListener(request, {
    onKill() {
      websocket.close();
    },
  });

  const systemChannelIds = [
    `timeline:access_token:${request.accessTokenId}`,
    `timeline:system:${request.accountId}`,
  ];

  for (const channelId of systemChannelIds) {
    subscribe(`${redisPrefix}${channelId}`, listener);

    // System subscriptions have no heartbeat, hence the no-op stopHeartbeat:
    subscriptions[channelId] = {
      channelName: 'system',
      listener,
      stopHeartbeat: () => {
      },
    };
  }

  connectedChannels.labels({ type: 'websocket', channel: 'system' }).inc(2);
};
/**
 * Handle a new websocket connection: set up keep-alive tracking, the session
 * bookkeeping object, close/error/message handlers, the system channels, and
 * any stream requested via the connection URL's query string.
 * @param {WebSocket & { isAlive: boolean }} ws
 * @param {http.IncomingMessage & ResolvedAccount} req
 * @param {import('pino').Logger} log
 */
function onConnection(ws, req, log) {
  // Note: url.parse could throw, which would terminate the connection, so we
  // increment the connected clients metric straight away when we establish
  // the connection, without waiting:
  connectedClients.labels({ type: 'websocket' }).inc();

  // Setup connection keep-alive state; the periodic sweep below terminates
  // any socket whose isAlive flag was not refreshed by a pong:
  ws.isAlive = true;
  ws.on('pong', () => {
    ws.isAlive = true;
  });

  /**
   * @type {WebSocketSession}
   */
  const session = {
    websocket: ws,
    request: req,
    logger: log,
    subscriptions: {},
  };

  ws.on('close', function onWebsocketClose() {
    const subscriptions = Object.keys(session.subscriptions);
    subscriptions.forEach(channelIds => {
      removeSubscription(session, channelIds.split(';'));
    });

    // Decrement the metrics for connected clients:
    connectedClients.labels({ type: 'websocket' }).dec();

    // FIX: the original code did `delete session;` here with a comment
    // claiming it aided garbage collection. `delete` on a plain variable is a
    // no-op (and a SyntaxError in strict mode) — it never frees anything.
    // Once this handler has unsubscribed every listener, the session and the
    // websocket/request/logger it references are unreachable and will be
    // collected normally, so no explicit action is needed.
  });

  // Note: immediately after the `error` event is emitted, the `close` event
  // is emitted. As such, all we need to do is log the error here.
  ws.on('error', (/** @type {Error} */ err) => {
    log.error(err);
  });

  ws.on('message', (data, isBinary) => {
    if (isBinary) {
      log.warn('Received binary data, closing connection');
      ws.close(1003, 'The mastodon streaming server does not support binary messages');
      return;
    }

    const message = data.toString('utf8');
    const json = parseJSON(message, session.request);

    if (!json) return;

    const { type, stream, ...params } = json;

    if (type === 'subscribe') {
      subscribeWebsocketToChannel(session, firstParam(stream), params);
    } else if (type === 'unsubscribe') {
      unsubscribeWebsocketFromChannel(session, firstParam(stream), params);
    } else {
      // Unknown action type
    }
  });

  subscribeWebsocketToSystemChannel(session);

  // Parse the URL for the connection arguments (if supplied), url.parse can throw:
  const location = req.url && url.parse(req.url, true);

  if (location && location.query.stream) {
    subscribeWebsocketToChannel(session, firstParam(location.query.stream), location.query);
  }
}
wss.on('connection', onConnection);

// Keep-alive sweep: any client that did not answer the previous ping (its
// isAlive flag is still false) is terminated; everyone else is pinged again.
setInterval(() => {
  for (const ws of wss.clients) {
    // @ts-ignore
    if (ws.isAlive === false) {
      ws.terminate();
      continue;
    }

    // @ts-ignore
    ws.isAlive = false;
    ws.ping('', false);
  }
}, 30000);

attachServerWithConfig(server, address => {
  logger.info(`Streaming API now listening on ${address}`);
});
// Graceful shutdown on termination signals.
const onExit = () => {
  server.close();
  process.exit(0);
};

/** @param {Error} err */
const onError = (err) => {
  logger.error(err);
  server.close();
  // FIX: exit with a non-zero status so process supervisors can tell this
  // was a crash; the original code exited with 0 here, masking failures.
  process.exit(1);
};

process.on('SIGINT', onExit);
process.on('SIGTERM', onExit);
// FIX: the original code also registered `onExit` on the 'exit' event. An
// 'exit' listener runs while the process is already terminating: the async
// `server.close()` can never complete there, and calling `process.exit(0)`
// from it rewrites every exit status — including failures — to 0. That
// registration has been removed.
process.on('uncaughtException', onError);
- };
/**
 * Bind the server according to the environment: a filesystem socket/pipe
 * when SOCKET is set (or PORT is non-numeric), otherwise a TCP port.
 * @param {any} server
 * @param {function(string): void} [onSuccess] called with the bound address
 */
const attachServerWithConfig = (server, onSuccess) => {
  // Listen on a path when SOCKET is set, or when PORT holds something
  // non-numeric (treated as a socket/pipe path). The original relied on
  // `||`/`&&` precedence for the same grouping; made explicit here.
  const listenOnPath = Boolean(process.env.SOCKET || (process.env.PORT && Number.isNaN(+process.env.PORT)));

  if (listenOnPath) {
    server.listen(process.env.SOCKET || process.env.PORT, () => {
      // FIX: make the socket world-writable unconditionally. The original
      // ran chmodSync only inside `if (onSuccess)`, so socket permissions
      // silently depended on whether a success callback was supplied.
      fs.chmodSync(server.address(), 0o666);

      if (onSuccess) {
        onSuccess(server.address());
      }
    });
  } else {
    server.listen(+(process.env.PORT || 4000), process.env.BIND || '127.0.0.1', () => {
      if (onSuccess) {
        onSuccess(`${server.address().address}:${server.address().port}`);
      }
    });
  }
};
- startServer();
|