diff --git a/packages/cli/package.json b/packages/cli/package.json index 25d3f87e21..89d13e9e9a 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -39,6 +39,7 @@ "@opentelemetry/sdk-trace-node": "^1.15.1", "@opentelemetry/semantic-conventions": "^1.15.1", "@types/js-yaml": "^3.12.5", + "@types/tunnel-ssh": "4.1.1", "ansi-escapes": "3.2.0", "async-file": "^2.0.2", "bytes": "^3.1.2", @@ -77,9 +78,9 @@ "strftime": "^0.10.0", "strip-ansi": "^6", "term-img": "^4.1.0", - "tmp": "^0.0.33", "true-myth": "2.2.3", "tslib": "1.14.1", + "tunnel-ssh": "4.1.6", "urijs": "^1.19.11", "uuid": "3.3.2", "valid-url": "^1.0.9", @@ -113,6 +114,7 @@ "@types/std-mocks": "^1.0.4", "@types/strftime": "^0.9.8", "@types/supports-color": "^5.3.0", + "@types/tmp": "^0.2.6", "@types/urijs": "^1.19.4", "@types/uuid": "^8.3.0", "@types/validator": "^10.9.0", @@ -136,6 +138,7 @@ "sinon": "^7.2.4", "std-mocks": "^2.0.0", "strip-ansi": "6.0.1", + "tmp": "^0.2.3", "ts-node": "^10.9.1", "tsheredoc": "^1.0.1", "typescript": "4.8.4" diff --git a/packages/cli/src/commands/addons/index.ts b/packages/cli/src/commands/addons/index.ts index 87af213bfe..080474a9d0 100644 --- a/packages/cli/src/commands/addons/index.ts +++ b/packages/cli/src/commands/addons/index.ts @@ -141,7 +141,7 @@ function formatAttachment(attachment: Heroku.AddOnAttachment, showApp = true) { return output.join(' ') } -function renderAttachment(attachment: Heroku.AddOnAttachment, app: string, isFirst = false) { +export function renderAttachment(attachment: Heroku.AddOnAttachment, app: string, isFirst = false): string { const line = isFirst ? '\u2514\u2500' : '\u251C\u2500' const attName = formatAttachment(attachment, attachment.app?.name !== app) return printf(' %s %s', color.dim(line), attName) diff --git a/packages/pg-v5/commands/bloat.js b/packages/cli/src/commands/pg/bloat.ts similarity index 76% rename from packages/pg-v5/commands/bloat.js rename to packages/cli/src/commands/pg/bloat.ts index d529eae05c..c8b8d801cc 100644 --- a/packages/pg-v5/commands/bloat.js +++ b/packages/cli/src/commands/pg/bloat.ts @@ -1,14 +1,9 @@ -'use strict' +import {Command, flags} from '@heroku-cli/command' +import {Args} from '@oclif/core' +import {database} from '../../lib/pg/fetcher' +import {exec} from '../../lib/pg/psql' -const cli = require('heroku-cli-util') - -async function run(context, heroku) { - const fetcher = require('../lib/fetcher')(heroku) - const psql = require('../lib/psql') - - let db = await fetcher.database(context.app, context.args.database) - - let query = ` +const query = ` WITH constants AS ( SELECT current_setting('block_size')::numeric AS bs, 23 AS hdr, 4 AS ma ), bloat_info AS ( @@ -71,19 +66,24 @@ FROM ORDER BY raw_waste DESC, bloat DESC ` - let output = await psql.exec(db, query) - process.stdout.write(output) -} +export default class Bloat extends Command { + static topic = 'pg'; + static description = 'show table and index bloat in your database ordered by most wasteful'; + static flags = { + app: flags.app({required: true}), + remote: flags.remote(), + }; + + static args = { + database: Args.string(), + }; -const cmd = { - topic: 'pg', - description: 'show table and index bloat in your database ordered by most wasteful', - needsApp: true, - needsAuth: true, - args: [{name: 'database', optional: true}], - run: cli.command({preauth: true}, run), + public async run(): Promise { + const {flags, args} = await this.parse(Bloat) + const {app} = flags + const db = await database(this.heroku, app, args.database) + const output = await 
exec(db, query)
+    process.stdout.write(output)
+  }
 }
-module.exports = [
-  Object.assign({command: 'bloat'}, cmd),
-]
diff --git a/packages/cli/src/lib/pg/bastion.ts b/packages/cli/src/lib/pg/bastion.ts
new file mode 100644
index 0000000000..7fe75f42e3
--- /dev/null
+++ b/packages/cli/src/lib/pg/bastion.ts
@@ -0,0 +1,132 @@
+const debug = require('debug')('pg')
+import {APIClient} from '@heroku-cli/command'
+import * as EventEmitter from 'node:events'
+import * as createTunnel from 'tunnel-ssh'
+import {promisify} from 'util'
+import host from './host'
+import {getConnectionDetails} from './util'
+
+export const getBastion = function (config:Record<string, string>, baseName: string) {
+  // If there are bastions, extract a host and a key
+  // otherwise, return an empty Object
+
+  // If there are bastions:
+  // * there should be one *_BASTION_KEY
+  // * pick one host from the comma-separated list in *_BASTIONS
+  // We assert that _BASTIONS and _BASTION_KEY always exist together
+  // If either is falsy, pretend neither exist
+
+  const bastionKey = config[`${baseName}_BASTION_KEY`]
+  const bastions = (config[`${baseName}_BASTIONS`] || '').split(',')
+  const bastionHost = bastions[Math.floor(Math.random() * bastions.length)]
+  return (bastionKey && bastionHost) ? {bastionHost, bastionKey} : {}
+}
+
+export const env = (db: ReturnType<typeof getConnectionDetails>) => {
+  const baseEnv = Object.assign({
+    PGAPPNAME: 'psql non-interactive',
+    PGSSLMODE: (!db.host || db.host === 'localhost') ? 'prefer' : 'require',
+  }, process.env)
+  const mapping:Record<string, keyof ReturnType<typeof getConnectionDetails>> = {
+    PGUSER: 'user',
+    PGPASSWORD: 'password',
+    PGDATABASE: 'database',
+    PGPORT: 'port',
+    PGHOST: 'host',
+  }
+  Object.keys(mapping).forEach(envVar => {
+    const val = db[mapping[envVar]]
+    if (val) {
+      baseEnv[envVar] = val as string
+    }
+  })
+  return baseEnv
+}
+
+export function tunnelConfig(db: ReturnType<typeof getConnectionDetails>) {
+  const localHost = '127.0.0.1'
+  // eslint-disable-next-line no-mixed-operators
+  const localPort = Math.floor(Math.random() * (65535 - 49152) + 49152)
+  return {
+    username: 'bastion',
+    host: db.bastionHost,
+    privateKey: db.bastionKey,
+    dstHost: db.host,
+    dstPort: Number.parseInt(db.port, 10),
+    localHost: localHost,
+    localPort: localPort,
+  }
+}
+
+export function getConfigs(db: ReturnType<typeof getConnectionDetails>) {
+  const dbEnv: NodeJS.ProcessEnv = env(db)
+  const dbTunnelConfig = tunnelConfig(db)
+  if (db.bastionKey) {
+    Object.assign(dbEnv, {
+      PGPORT: dbTunnelConfig.localPort,
+      PGHOST: dbTunnelConfig.localHost,
+    })
+  }
+
+  return {
+    dbEnv,
+    dbTunnelConfig,
+  }
+}
+
+class Timeout {
+  private readonly timeout: number
+  private readonly message: string
+  private readonly events = new EventEmitter()
+  private timer: NodeJS.Timeout | undefined
+
+  constructor(timeout: number, message: string) {
+    this.timeout = timeout
+    this.message = message
+  }
+
+  async promise() {
+    this.timer = setTimeout(() => {
+      this.events.emit('error', new Error(this.message))
+    }, this.timeout)
+
+    try {
+      await EventEmitter.once(this.events, 'cancelled')
+    } finally {
+      clearTimeout(this.timer)
+    }
+  }
+
+  cancel() {
+    this.events.emit('cancelled')
+  }
+}
+
+export async function sshTunnel(db: ReturnType<typeof getConnectionDetails>, dbTunnelConfig: ReturnType<typeof tunnelConfig>, timeout = 10000) {
+  if (!db.bastionKey) {
+    return null
+  }
+
+  const timeoutInstance = new Timeout(timeout, 'Establishing a secure tunnel timed out')
+  const createSSHTunnel = promisify(createTunnel)
+  try {
+    return await Promise.race([
+      timeoutInstance.promise(),
+      createSSHTunnel(dbTunnelConfig),
+    ])
+  } catch (error) {
+    debug(error)
+    throw new Error('Unable to establish a secure tunnel to your database.')
+  } finally {
+    timeoutInstance.cancel()
+  }
+}
+
+export async function fetchConfig(heroku:APIClient, db: {id: string}) {
+  return heroku.get<{host: string, private_key:string}>(
+    `/client/v11/databases/${encodeURIComponent(db.id)}/bastion`,
+    {
+      hostname: host(),
+    },
+  )
+}
diff --git a/packages/cli/src/lib/pg/fetcher.ts b/packages/cli/src/lib/pg/fetcher.ts
index 76e6bd546a..889d96f868 100644
--- a/packages/cli/src/lib/pg/fetcher.ts
+++ b/packages/cli/src/lib/pg/fetcher.ts
@@ -3,10 +3,11 @@
 import type {AddOnAttachment} from '@heroku-cli/schema'
 import * as Heroku from '@heroku-cli/schema'
 import debug from 'debug'
 import {AmbiguousError, appAttachment, NotFound} from '../addons/resolve'
+import {fetchConfig} from './bastion'
 import {getConfig} from './config'
 import color from '@heroku-cli/color'
 import type {AddOnAttachmentWithConfigVarsAndPlan} from './types'
-import {getConfigVarName} from './util'
+import {bastionKeyPlan, getConfigVarName, getConnectionDetails} from './util'
 
 const pgDebug = debug('pg')
@@ -132,3 +133,23 @@ async function allAttachments(heroku: APIClient, app_id: string) {
 export async function getAddon(heroku: APIClient, app: string, db = 'DATABASE_URL') {
   return ((await attachment(heroku, app, db))).addon
 }
+
+export async function database(heroku: APIClient, app: string, db?: string, namespace?: string) {
+  const attached = await attachment(heroku, app, db, namespace)
+
+  // would inline this as well but in some cases attachment pulls down config
+  // as well, and we would request twice at the same time but I did not want
+  // to push this down into attachment because we do not always need config
+  const config = await getConfig(heroku, attached.app.name as string)
+
+  const database = getConnectionDetails(attached, config)
+  if (bastionKeyPlan(attached.addon) && !database.bastionKey) {
+    const {body: bastionConfig} = await fetchConfig(heroku, attached.addon)
+    const bastionHost = bastionConfig.host
+    const bastionKey = bastionConfig.private_key
+
+    Object.assign(database, {bastionHost, bastionKey})
+  }
+
+  return database
+}
diff --git a/packages/cli/src/lib/pg/psql.ts b/packages/cli/src/lib/pg/psql.ts
new file mode 100644
index 0000000000..1f9eb29ae1
--- /dev/null
+++ b/packages/cli/src/lib/pg/psql.ts
@@ -0,0 +1,272 @@
+import {ux} from '@oclif/core'
+import {spawn, SpawnOptions, type SpawnOptionsWithStdioTuple} from 'child_process'
+import debug from 'debug'
+import * as fs from 'fs'
+import type {ChildProcess} from 'node:child_process'
+import {EventEmitter, once} from 'node:events'
+import type {Server} from 'node:net'
+import * as path from 'node:path'
+import {Stream} from 'node:stream'
+import {finished} from 'node:stream/promises'
+import {getConfigs, sshTunnel} from './bastion'
+import {getConnectionDetails} from './util'
+
+const pgDebug = debug('pg')
+export function psqlQueryOptions(query:string, dbEnv: NodeJS.ProcessEnv, cmdArgs: string[] = []) {
+  pgDebug('Running query: %s', query.trim())
+
+  const psqlArgs = ['-c', query, '--set', 'sslmode=require', ...cmdArgs]
+
+  const childProcessOptions: SpawnOptionsWithStdioTuple<'ignore', 'pipe', 'inherit'> = {
+    stdio: ['ignore', 'pipe', 'inherit'],
+  }
+
+  return {
+    dbEnv,
+    psqlArgs,
+    childProcessOptions,
+  }
+}
+
+export function psqlFileOptions(file: string, dbEnv: NodeJS.ProcessEnv) {
+  pgDebug('Running sql file: %s', file.trim())
+
+  const childProcessOptions:SpawnOptions = {
+    stdio: ['ignore', 'pipe', 'inherit'],
+  }
+
+  const psqlArgs = ['-f', file, '--set', 'sslmode=require']
+
+  return {
+    dbEnv,
+    psqlArgs,
+    childProcessOptions,
+  }
+}
+
+export function psqlInteractiveOptions(prompt: string, dbEnv: NodeJS.ProcessEnv) {
+  let psqlArgs = ['--set', `PROMPT1=${prompt}`, '--set', `PROMPT2=${prompt}`]
+  const psqlHistoryPath = process.env.HEROKU_PSQL_HISTORY
+  if (psqlHistoryPath) {
+    if (fs.existsSync(psqlHistoryPath) && fs.statSync(psqlHistoryPath).isDirectory()) {
+      const appLogFile = `${psqlHistoryPath}/${prompt.split(':')[0]}`
+      pgDebug('Logging psql history to %s', appLogFile)
+      psqlArgs = psqlArgs.concat(['--set', `HISTFILE=${appLogFile}`])
+    } else if (fs.existsSync(path.dirname(psqlHistoryPath))) {
+      pgDebug('Logging psql history to %s', psqlHistoryPath)
+      psqlArgs = psqlArgs.concat(['--set', `HISTFILE=${psqlHistoryPath}`])
+    } else {
+      ux.warn(`HEROKU_PSQL_HISTORY is set but is not a valid path (${psqlHistoryPath})`)
+    }
+  }
+
+  psqlArgs = psqlArgs.concat(['--set', 'sslmode=require'])
+
+  const childProcessOptions: SpawnOptions = {
+    stdio: 'inherit',
+  }
+
+  return {
+    dbEnv,
+    psqlArgs,
+    childProcessOptions,
+  }
+}
+
+export function execPSQL({dbEnv, psqlArgs, childProcessOptions}: {dbEnv: NodeJS.ProcessEnv, psqlArgs: string[], childProcessOptions: SpawnOptions}) {
+  const options = {
+    env: dbEnv,
+    ...childProcessOptions,
+  }
+
+  pgDebug('opening psql process')
+  const psql = spawn('psql', psqlArgs, options)
+  psql.once('spawn', () => pgDebug('psql process spawned'))
+
+  return psql
+}
+
+export async function waitForPSQLExit(psql: EventEmitter) {
+  let errorToThrow: Error | null = null
+  try {
+    const [exitCode] = await once(psql, 'close')
+
+    pgDebug(`psql exited with code ${exitCode}`)
+    if (exitCode > 0) {
+      errorToThrow = new Error(`psql exited with code ${exitCode}`)
+    }
+  } catch (error) {
+    pgDebug('psql process error', error)
+    const {code} = error as {code: string}
+    if (code === 'ENOENT') {
+      errorToThrow = new Error('The local psql command could not be located. For help installing psql, see https://devcenter.heroku.com/articles/heroku-postgresql#local-setup')
+    }
+  }
+
+  if (errorToThrow) {
+    throw errorToThrow
+  }
+}
+
+// According to node.js docs, sending a kill to a process won't cause an error
+// but could have unintended consequences if the PID gets reassigned:
+// https://nodejs.org/docs/latest-v14.x/api/child_process.html#child_process_subprocess_kill_signal
+// To be on the safe side, check if the process was already killed before sending the signal
+function kill(childProcess: ChildProcess, signal: number | NodeJS.Signals | undefined) {
+  if (!childProcess.killed) {
+    pgDebug('killing psql child process')
+    childProcess.kill(signal)
+  }
+}
+
+// trap SIGINT so that ctrl+c can be used by psql without killing the
+// parent node process.
+// you can use ctrl+c in psql to kill running queries
+// while keeping the psql process open.
+// This code is to stop the parent node process (heroku CLI)
+// from exiting. If the parent Heroku CLI node process exits, then psql will exit as it
+// is a child process of the Heroku CLI node process.
+export const trapAndForwardSignalsToChildProcess = (childProcess: ChildProcess) => {
+  const signalsToTrap: NodeJS.Signals[] = ['SIGINT']
+  const signalTraps = signalsToTrap.map(signal => {
+    process.removeAllListeners(signal)
+    const listener = () => kill(childProcess, signal)
+    process.on(signal, listener)
+    return [signal, listener]
+  }) as ([NodeJS.Signals, () => void])[]
+
+  // restores the built-in node ctrl+c and other handlers
+  return () => {
+    signalTraps.forEach(([signal, listener]) => {
+      process.removeListener(signal as string, listener)
+    })
+  }
+}
+
+export function consumeStream(inputStream: Stream) {
+  let result = ''
+  const throughStream = new Stream.PassThrough()
+
+  // eslint-disable-next-line no-async-promise-executor
+  const promise = new Promise(async (resolve, reject) => {
+    try {
+      await finished(throughStream)
+      resolve(result)
+    } catch (error) {
+      reject(error)
+    }
+  })
+
+  // eslint-disable-next-line no-return-assign
+  throughStream.on('data', chunk => result += chunk.toString())
+  inputStream.pipe(throughStream)
+  return promise
+}
+
+export async function runWithTunnel(db: Parameters<typeof sshTunnel>[0], tunnelConfig: Parameters<typeof sshTunnel>[1], options: Parameters<typeof execPSQL>[0]): Promise<string> {
+  const tunnel = await Tunnel.connect(db, tunnelConfig)
+  pgDebug('after create tunnel')
+
+  const psql = execPSQL(options)
+  // interactive opens with stdio: 'inherit'
+  // which gives the child process the same stdin,stdout,stderr of the node process (global `process`)
+  // https://nodejs.org/api/child_process.html#child_process_options_stdio
+  // psql.stdout will be null in this case
+  // return a string for consistency but ideally we should return the child process from this function
+  // and let the caller decide what to do with stdin/stdout/stderr
+  const stdoutPromise = psql.stdout ? consumeStream(psql.stdout) : Promise.resolve('')
+  const cleanupSignalTraps = trapAndForwardSignalsToChildProcess(psql)
+
+  try {
+    pgDebug('waiting for psql or tunnel to exit')
+    // wait for either psql or tunnel to exit;
+    // the important bit is that we ensure both processes are
+    // always cleaned up in the `finally` block below
+    await Promise.race([
+      waitForPSQLExit(psql),
+      tunnel.waitForClose(),
+    ])
+  } catch (error) {
+    pgDebug('wait for psql or tunnel error', error)
+    throw error
+  } finally {
+    pgDebug('begin tunnel cleanup')
+    cleanupSignalTraps()
+    tunnel.close()
+    kill(psql, 'SIGKILL')
+    pgDebug('end tunnel cleanup')
+  }
+
+  return stdoutPromise as Promise<string>
+}
+
+// a small wrapper around tunnel-ssh
+// so that other code doesn't have to worry about
+// whether there is or is not a tunnel
+export class Tunnel {
+  private readonly bastionTunnel: Server
+  private readonly events: EventEmitter
+  constructor(bastionTunnel: Server) {
+    this.bastionTunnel = bastionTunnel
+    this.events = new EventEmitter()
+  }
+
+  async waitForClose() {
+    if (this.bastionTunnel) {
+      try {
+        pgDebug('wait for tunnel close')
+        await once(this.bastionTunnel, 'close')
+        pgDebug('tunnel closed')
+      } catch (error) {
+        pgDebug('tunnel close error', error)
+        throw new Error('Secure tunnel to your database failed')
+      }
+    } else {
+      pgDebug('no bastion required; waiting for fake close event')
+      await once(this.events, 'close')
+    }
+  }
+
+  close() {
+    if (this.bastionTunnel) {
+      pgDebug('close tunnel')
+      this.bastionTunnel.close()
+    } else {
+      pgDebug('no tunnel necessary; sending fake close event')
+      this.events.emit('close', 0)
+    }
+  }
+
+  static async connect(db: Parameters<typeof sshTunnel>[0], tunnelConfig: Parameters<typeof sshTunnel>[1]) {
+    const tunnel = await sshTunnel(db, tunnelConfig)
+    return new Tunnel(tunnel as Server)
+  }
+}
+
+export async function fetchVersion(db: Parameters<typeof exec>[0]) {
+  const output = await exec(db, 'SHOW server_version', ['-X', '-q'])
+  return output.match(/[0-9]{1,}\.[0-9]{1,}/)?.[0]
+}
+
+export async function exec(db: Parameters<typeof getConfigs>[0], query: string, cmdArgs: string[] = []) {
+  const configs = getConfigs(db)
+  const options = psqlQueryOptions(query, configs.dbEnv, cmdArgs)
+  return runWithTunnel(db, configs.dbTunnelConfig, options)
+}
+
+export async function execFile(db:Parameters<typeof getConfigs>[0], file: string) {
+  const configs = getConfigs(db)
+  const options = psqlFileOptions(file, configs.dbEnv)
+
+  return runWithTunnel(db, configs.dbTunnelConfig, options)
+}
+
+export async function interactive(db: ReturnType<typeof getConnectionDetails>) {
+  const name = db.attachment.name
+  const prompt = `${db.attachment.app.name}::${name}%R%# `
+  const configs = getConfigs(db)
+  configs.dbEnv.PGAPPNAME = 'psql interactive' // default was 'psql non-interactive'
+  const options = psqlInteractiveOptions(prompt, configs.dbEnv)
+
+  return runWithTunnel(db, configs.dbTunnelConfig, options)
+}
diff --git a/packages/cli/src/lib/pg/types.ts b/packages/cli/src/lib/pg/types.ts
index 70fe4ad723..ac8b7ac7ab 100644
--- a/packages/cli/src/lib/pg/types.ts
+++ b/packages/cli/src/lib/pg/types.ts
@@ -50,11 +50,15 @@ export type Link = {
   name: string,
   remote?: Link,
 }
+export type CredentialsState = 'active' | 'rotating' | 'enabling' | 'revoking'
 export type CredentialsInfo = {
   database: string
   host: string
   port: number
+  name: string
+  state: 'active' | 'rotating'
   credentials: {
+    connections: number
     user: string
     password: string
     state: string
diff --git a/packages/cli/src/lib/pg/util.ts b/packages/cli/src/lib/pg/util.ts
index 8cc8efca27..356e06ab9d 100644
---
a/packages/cli/src/lib/pg/util.ts +++ b/packages/cli/src/lib/pg/util.ts @@ -1,4 +1,13 @@ -import type {AddOnAttachmentWithConfigVarsAndPlan} from './types' +import color from '@heroku-cli/color' +import type {AddOnAttachment} from '@heroku-cli/schema' +import {ux} from '@oclif/core' +import debug from 'debug' +import {renderAttachment} from '../../commands/addons' +import {multiSortCompareFn} from '../utils/multisort' +import {getBastion} from './bastion' +import type {AddOnAttachmentWithConfigVarsAndPlan, CredentialsInfo} from './types' + +const env = require('process').env export function getConfigVarName(configVars: string[]): string { const connStringVars = configVars.filter(cv => (cv.endsWith('_URL'))) @@ -9,6 +18,183 @@ export function getConfigVarName(configVars: string[]): string { export const essentialNumPlan = (addon: AddOnAttachmentWithConfigVarsAndPlan) => Boolean(addon?.plan?.name?.split(':')[1].match(/^essential/)) export const legacyEssentialPlan = (addon: AddOnAttachmentWithConfigVarsAndPlan) => Boolean(addon?.plan?.name?.split(':')[1].match(/(dev|basic|mini)$/)) -export function essentialPlan(addon:AddOnAttachmentWithConfigVarsAndPlan) { +export function essentialPlan(addon: AddOnAttachmentWithConfigVarsAndPlan) { return essentialNumPlan(addon) || legacyEssentialPlan(addon) } + +function getConfigVarNameFromAttachment(attachment: Required, config: Record = {}): string { + const configVars = attachment.config_vars?.filter((cv: string) => { + return config[cv]?.startsWith('postgres://') + }) ?? [] + if (configVars.length === 0) { + throw new Error(`No config vars found for ${attachment.name}; perhaps they were removed as a side effect of ${color.cmd('heroku rollback')}? Use ${color.cmd('heroku addons:attach')} to create a new attachment and then ${color.cmd('heroku addons:detach')} to remove the current attachment.`) + } + + const configVarName = `${attachment.name}_URL` + if (configVars.includes(configVarName) && configVarName in config) { + return configVarName + } + + return getConfigVarName(configVars) +} + +export function presentCredentialAttachments(app: string, credAttachments: Required[], credentials: CredentialsInfo[], cred: unknown) { + const isForeignApp = (attOrAddon: Required) => attOrAddon.app.name === app ? 0 : 1 + const comparators = [ + (a: Required, b: Required) => { + const fa = isForeignApp(a) + const fb = isForeignApp(b) + return fa < fb ? -1 : (fb < fa ? 1 : 0) + }, + (a: Required, b: Required) => a.name.localeCompare(b.name), + (a: Required, b: Required) => a.app?.name?.localeCompare(b.app?.name ?? '') ?? 
0, + ] + credAttachments.sort(multiSortCompareFn(comparators)) + // render each attachment under the credential + const attLines = credAttachments.map(function (attachment, idx) { + const isLast = (idx === credAttachments.length - 1) + return renderAttachment(attachment, app, isLast) + }) + + const rotationLines = [] + const credentialStore = credentials.find(a => a.name === cred) + if (credentialStore?.state === 'rotating') { + const formatted = credentialStore?.credentials.map(credential => { + return { + user: credential.user, + state: credential.state, + connections: credential.connections, + } + }) + // eslint-disable-next-line no-eq-null, eqeqeq + const connectionInformationAvailable = formatted.some(c => c.connections != null) + if (connectionInformationAvailable) { + const prefix = ' ' + rotationLines.push(`${prefix}Usernames currently active for this credential:`) + ux.table(formatted, { + user: { + get(row: typeof formatted[0]) { + return `${prefix}${row.user}` + }, + }, + state: { + get(row) { + return row.state === 'revoking' ? 'waiting for no connections to be revoked' : row.state + }, + }, + connections: { + get(row) { + return `${row.connections} connections` + }, + }, + }, { + 'no-header': true, + printLine(line: unknown): void { + rotationLines.push(line) + }, + }) + } + } + + return [cred, ...attLines, ...rotationLines].join('\n') +} + +export const getConnectionDetails = (attachment: Required, config: Record = {}) => { + const connstringVar = getConfigVarNameFromAttachment(attachment, config) + + // remove _URL from the end of the config var name + const baseName = connstringVar.slice(0, -4) + + // build the default payload for non-bastion dbs + debug(`Using "${connstringVar}" to connect to your database…`) + + const conn = parsePostgresConnectionString(config[connstringVar]) + + const payload = { + user: conn.username, + password: conn.password, + database: conn.database, + host: conn.hostname, + port: conn.port, + attachment, + url: conn, + bastionKey: '', + bastionHost: '', + } + + // If bastion creds exist, graft it into the payload + const bastion = getBastion(config, baseName) + if (bastion) { + Object.assign(payload, bastion) + } + + return payload +} + +export const bastionKeyPlan = (a: AddOnAttachmentWithConfigVarsAndPlan) => Boolean(a.plan.name.match(/private/)) + +export const configVarNamesFromValue = (config: Record, value: string) => { + const keys: string[] = [] + for (const key of Object.keys(config)) { + const configVal = config[key] + if (configVal === value) { + keys.push(key) + } else if (configVal.startsWith('postgres://')) { + try { + const configURL = new URL(configVal) + const ourURL = new URL(value) + const components: (keyof URL)[] = ['protocol', 'hostname', 'port', 'pathname'] + if (components.every(component => ourURL[component] === configURL[component])) { + keys.push(key) + } + } catch { + // ignore -- this is not a valid URL so not a matching URL + } + } + } + + const comparator = (a: string, b: string) => { + const isDatabaseUrlA = Number(a === 'DATABASE_URL') + const isDatabaseUrlB = Number(b === 'DATABASE_URL') + return isDatabaseUrlA < isDatabaseUrlB ? -1 : (isDatabaseUrlB < isDatabaseUrlA ? 
1 : 0) + } + + return keys.sort(comparator) +} + +export const databaseNameFromUrl = (uri: string, config: Record) => { + const names = configVarNamesFromValue(config, uri) + let name = names.pop() + while (names.length > 0 && name === 'DATABASE_URL') name = names.pop() + if (name) { + return color.configVar(name.replace(/_URL$/, '')) + } + + const conn = exports.parsePostgresConnectionString(uri) + return `${conn.host}:${conn.port}${conn.pathname}` +} + +export const parsePostgresConnectionString = (db: string): Omit & {database: string | null} => { + const dbUrl = new URL(db.match(/:\/\//) ? db : `postgres:///${db}`) + const databaseName = dbUrl.pathname || null + let database: string | null + if (databaseName && databaseName.charAt(0) === '/') { + database = databaseName.slice(1) || null + } else { + database = databaseName + } + + dbUrl.port = dbUrl.port || env.PGPORT + if (dbUrl.hostname) { + dbUrl.port = dbUrl.port || '5432' + } + + // Strange behavior in that we cannot spread the + // props of a URL. i.e. {...dbUrl, database} does not work + const {pathname, host, port, password, username, hostname, href, origin, protocol} = dbUrl + return {pathname, host, port, password, username, hostname, href, origin, protocol, database} +} diff --git a/packages/cli/src/lib/utils/multisort.ts b/packages/cli/src/lib/utils/multisort.ts new file mode 100644 index 0000000000..563299f0a2 --- /dev/null +++ b/packages/cli/src/lib/utils/multisort.ts @@ -0,0 +1,63 @@ +export type Comparator = Parameters[0] +/** + * The multiSortCompareFn function is used to + * build a single comparator function for use + * in Array.sort when multiple sort criteria + * is needed on an object type. The indices of + * specified array of SortCriteria indicate the + * precedence of each comparator. + * + * @example + * ```ts + * type User = { + * firstName: string + * lastName: string + * } + * const localeCompare = (a: string, b: string) => a.localeCompare(b) + * const comparators = [ + * (a: User, b: User) => localeCompare(a.firstName, b.firstName), + * (a: User, b: User) => localeCompare(a.lastName, b.lastName) + * ] + * + * const users: User[] = [ + * {fistName: 'Bill', lastName: 'Stevens'}, + * {firstName: 'Jill', lastName: 'Ames'}, + * {firstName: 'Bill', lastName: 'Bernard'}, + * ] + * users.sort(multiSortCompareFn(comparators)) // Bill Bernard, Bill Stevens, Jill Ames + * ``` + * @param comparators The array of Comparators whose indices indicate sort precedence + * @returns Comparator + */ +export function multiSortCompareFn(comparators: Comparator[]): Comparator { + // Typical bitmask strategy whereas the most + // significant bit represents the comparator + // result in the zero index and thus has the + // highest precedence. The bit length + // is determined by the number of comparators + // and the positional notation mirrors the + // comparator indices. + // There is a 32 bit limit in total. 2 bits + // are used for 1. the bitLength and 2. the two's + // compliment signed bit. This means we have a + // limit of 30 comparators max. + return (a: unknown, b: unknown): 1 | -1 | 0 => { + const bitLen = comparators.length - 1 + let bitA = 0 + let bitB = 0 + + comparators.forEach((comparator, index) => { + const priority = 1 << (bitLen - index) + const score = comparator?.(a, b) + if (score === -1) { + bitA |= priority + } + + if (score === 1) { + bitB |= priority + } + }) + return bitA > bitB ? -1 : (bitA < bitB ? 
1 : 0) + } +} + diff --git a/packages/cli/test/unit/lib/pg/psql.unit.test.ts b/packages/cli/test/unit/lib/pg/psql.unit.test.ts new file mode 100644 index 0000000000..73ce975b0e --- /dev/null +++ b/packages/cli/test/unit/lib/pg/psql.unit.test.ts @@ -0,0 +1,702 @@ +import {expect} from '@oclif/test' +import {ChildProcess, type SpawnOptions} from 'node:child_process' +import {EventEmitter, once} from 'node:events' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {PassThrough} from 'node:stream' +import * as proxyquire from 'proxyquire' +import type {SinonExpectation} from 'sinon' +import type {getConnectionDetails} from '../../../../src/lib/pg/util' +import {unwrap} from '../../../helpers/utils/unwrap' +import sinon = require('sinon') +import * as tmp from 'tmp' +import type * as Pgsql from '../../../../src/lib/pg/psql' +import type * as Bastion from '../../../../src/lib/pg/bastion' +import {constants, SignalConstants} from 'os' + +type DB = ReturnType +const db: DB = { + attachment: {} as DB['attachment'], + user: 'jeff', + password: 'pass', + database: 'mydb', + port: '5432', + host: 'localhost', + bastionHost: '', + bastionKey: '', + url: {} as DB['url'], +} + +const bastionDb:DB = { + attachment: {} as DB['attachment'], + user: 'jeff', + password: 'pass', + database: 'mydb', + port: '5432', + bastionHost: 'bastion-host', + bastionKey: 'super-private-key', + host: 'localhost', + url: {} as DB['url'], +} + +const NOW_OUTPUT = ` +now +------------------------------- + 2020-12-16 09:54:01.916894-08 +(1 row) +` + +const VERSION_OUTPUT = ` +server_version +------------------------------- +11.16 (Ubuntu 11.16-1.pgdg20.04+1) +(1 row) +` + +describe('psql', () => { + let fakePsqlProcess: FakeChildProcess | undefined + let fakeTunnel: TunnelStub | undefined + let tunnelStub: sinon.SinonStub + let sandbox: sinon.SinonSandbox + let mockSpawn: ReturnType + let psql: typeof Pgsql + let bastion: typeof Bastion + + beforeEach(() => { + sandbox = sinon.createSandbox() + tunnelStub = sandbox.stub().callsFake((_config, callback) => { + fakeTunnel = new TunnelStub() + callback(null, fakeTunnel) + }) + mockSpawn = createSpawnMocker(sandbox) + bastion = proxyquire('../../../../src/lib/pg/bastion', { + 'tunnel-ssh': tunnelStub, + }) + psql = proxyquire('../../../../src/lib/pg/psql', { + './bastion': bastion, + }) + fakePsqlProcess = new FakeChildProcess() + sandbox.stub(Math, 'random').callsFake(() => 0) + }) + + afterEach(async () => { + await fakePsqlProcess?.teardown() + // eslint-disable-next-line no-multi-assign + fakeTunnel = fakePsqlProcess = undefined + sandbox.restore() + }) + + async function ensureFinished(promise: Promise) { + try { + return await promise + } catch { + if (fakeTunnel) { + if (!fakeTunnel?.exited) { + throw new Error('tunnel was not closed') + } + } + + if (!fakePsqlProcess?.exited) { + throw new Error('psql process did not close') + } + } + } + + describe('exec', () => { + it('runs psql', async () => { + const expectedEnv = Object.freeze({ + PGAPPNAME: 'psql non-interactive', + PGSSLMODE: 'prefer', + PGUSER: 'jeff', + PGPASSWORD: 'pass', + PGDATABASE: 'mydb', + PGPORT: '5432', + PGHOST: 'localhost', + }) + + const mock = mockSpawn( + 'psql', + ['-c', 'SELECT NOW();', '--set', 'sslmode=require'], + { + stdio: ['ignore', 'pipe', 'inherit'], + env: expectedEnv, + }, + ) + + mock.callsFake(() => { + fakePsqlProcess?.start() + return fakePsqlProcess + }) + + const promise = psql.exec(db, 'SELECT NOW();') + await fakePsqlProcess?.waitForStart() + mock.verify() + 
fakePsqlProcess?.stdout.write(NOW_OUTPUT) + await fakePsqlProcess?.simulateExit(0) + const output = await ensureFinished(promise) + expect(output).to.equal(NOW_OUTPUT) + }) + + it('runs psql with supplied args', async () => { + const expectedEnv = Object.freeze({ + PGAPPNAME: 'psql non-interactive', + PGSSLMODE: 'prefer', + PGUSER: 'jeff', + PGPASSWORD: 'pass', + PGDATABASE: 'mydb', + PGPORT: '5432', + PGHOST: 'localhost', + }) + + const mock = mockSpawn( + 'psql', + ['-c', 'SELECT NOW();', '--set', 'sslmode=require', '-t', '-q'], + { + stdio: ['ignore', 'pipe', 'inherit'], + env: expectedEnv, + }, + ) + + mock.callsFake(() => { + fakePsqlProcess?.start() + return fakePsqlProcess + }) + + const promise = psql.exec(db, 'SELECT NOW();', ['-t', '-q']) + await fakePsqlProcess?.waitForStart() + mock.verify() + fakePsqlProcess?.stdout.write(NOW_OUTPUT) + await fakePsqlProcess?.simulateExit(0) + const output = await ensureFinished(promise) + expect(output).to.equal(NOW_OUTPUT) + }) + + it('runs psql and throws an error if psql exits with exit code > 0', async () => { + const expectedEnv = Object.freeze({ + PGAPPNAME: 'psql non-interactive', + PGSSLMODE: 'prefer', + PGUSER: 'jeff', + PGPASSWORD: 'pass', + PGDATABASE: 'mydb', + PGPORT: '5432', + PGHOST: 'localhost', + }) + + const mock = mockSpawn( + 'psql', + ['-c', 'SELECT NOW();', '--set', 'sslmode=require'], + { + stdio: ['ignore', 'pipe', 'inherit'], + env: expectedEnv, + }, + ) + + mock.callsFake(() => { + fakePsqlProcess?.start() + return fakePsqlProcess + }) + + const promise = psql.exec(db, 'SELECT NOW();') + await fakePsqlProcess?.waitForStart() + mock.verify() + + try { + expect(fakePsqlProcess?.exited).to.equal(false) + await fakePsqlProcess?.simulateExit(1) + await ensureFinished(promise) + throw new Error('psql.exec should have thrown') + } catch (error) { + const {message} = error as {message: string} + expect(message).to.equal('psql exited with code 1') + } + }) + + describe('private databases (not shield)', () => { + it('opens an SSH tunnel and runs psql for bastion databases', async () => { + const tunnelConf = { + username: 'bastion', + host: 'bastion-host', + privateKey: 'super-private-key', + dstHost: 'localhost', + dstPort: 5432, + localHost: '127.0.0.1', + localPort: 49152, + } + const mock = mockSpawn( + 'psql', + ['-c', 'SELECT NOW();', '--set', 'sslmode=require'], + sinon.match.any as unknown as Parameters[2], + ) + + mock.callsFake(() => { + fakePsqlProcess?.start() + return fakePsqlProcess + }) + const promise = psql.exec(bastionDb, 'SELECT NOW();') + await fakePsqlProcess?.waitForStart() + mock.verify() + expect(tunnelStub.withArgs(tunnelConf).calledOnce).to.equal(true) + await fakePsqlProcess?.simulateExit(0) + await ensureFinished(promise) + }) + + it('closes the tunnel manually if psql exits and the tunnel does not close on its own', async () => { + const tunnelConf = { + username: 'bastion', + host: 'bastion-host', + privateKey: 'super-private-key', + dstHost: 'localhost', + dstPort: 5432, + localHost: '127.0.0.1', + localPort: 49152, + } + const mock = mockSpawn( + 'psql', + ['-c', 'SELECT NOW();', '--set', 'sslmode=require'], + sinon.match.any as unknown as Parameters[2], + ) + + mock.callsFake(() => { + fakePsqlProcess?.start() + return fakePsqlProcess + }) + + const promise = psql.exec(bastionDb, 'SELECT NOW();') + await fakePsqlProcess?.waitForStart() + mock.verify() + expect(tunnelStub.withArgs(tunnelConf).calledOnce).to.equal(true) + expect(fakeTunnel?.exited).to.equal(false) + await 
fakePsqlProcess?.simulateExit(0) + await ensureFinished(promise) + expect(fakeTunnel?.exited).to.equal(true) + }) + + it('closes psql manually if the tunnel exits and psql does not close on its own', async () => { + const tunnelConf = { + username: 'bastion', + host: 'bastion-host', + privateKey: 'super-private-key', + dstHost: 'localhost', + dstPort: 5432, + localHost: '127.0.0.1', + localPort: 49152, + } + const mock = mockSpawn( + 'psql', + ['-c', 'SELECT NOW();', '--set', 'sslmode=require'], + sinon.match.any as unknown as Parameters[2], + ) + + mock.callsFake(() => { + fakePsqlProcess?.start() + return fakePsqlProcess + }) + + const execPromise = psql.exec(bastionDb, 'SELECT NOW();') + await fakePsqlProcess?.waitForStart() + mock.verify() + expect(tunnelStub.withArgs(tunnelConf).calledOnce).to.equal(true) + expect(fakePsqlProcess?.exited).to.equal(false) + fakeTunnel?.close() + await ensureFinished(execPromise) + expect(fakePsqlProcess?.exited).to.equal(true) + }) + }) + }) + + describe('fetchVersion', () => { + it('gets the server version', async () => { + const expectedEnv = Object.freeze({ + PGAPPNAME: 'psql non-interactive', + PGSSLMODE: 'prefer', + PGUSER: 'jeff', + PGPASSWORD: 'pass', + PGDATABASE: 'mydb', + PGPORT: '5432', + PGHOST: 'localhost', + }) + + const mock = mockSpawn( + 'psql', + ['-c', 'SHOW server_version', '--set', 'sslmode=require', '-X', '-q'], + { + stdio: ['ignore', 'pipe', 'inherit'], + env: expectedEnv, + }, + ) + + mock.callsFake(() => { + fakePsqlProcess?.start() + return fakePsqlProcess + }) + + const promise = psql.fetchVersion(db) + await fakePsqlProcess?.waitForStart() + mock.verify() + fakePsqlProcess?.stdout.write(VERSION_OUTPUT) + await fakePsqlProcess?.simulateExit(0) + const output = await ensureFinished(promise) + expect(output).to.equal('11.16') + }) + }) + + describe('execFile', () => { + it('runs psql with file as input', async () => { + const expectedEnv = Object.freeze({ + PGAPPNAME: 'psql non-interactive', + PGSSLMODE: 'prefer', + PGUSER: 'jeff', + PGPASSWORD: 'pass', + PGDATABASE: 'mydb', + PGPORT: '5432', + PGHOST: 'localhost', + }) + + const mock = mockSpawn( + 'psql', + ['-f', 'test.sql', '--set', 'sslmode=require'], + { + stdio: ['ignore', 'pipe', 'inherit'], + env: expectedEnv, + }, + ) + mock.callsFake(() => { + fakePsqlProcess?.start() + return fakePsqlProcess + }) + + const promise = psql.execFile(db, 'test.sql') + await fakePsqlProcess?.waitForStart() + mock.verify() + await fakePsqlProcess?.simulateExit(0) + await ensureFinished(promise) + }) + it('opens an SSH tunnel and runs psql for bastion databases', async () => { + const tunnelConf = { + username: 'bastion', + host: 'bastion-host', + privateKey: 'super-private-key', + dstHost: 'localhost', + dstPort: 5432, + localHost: '127.0.0.1', + localPort: 49152, + } + + const mock = mockSpawn( + 'psql', + ['-f', 'test.sql', '--set', 'sslmode=require'], + { + stdio: ['ignore', 'pipe', 'inherit'], + env: sinon.match.object as unknown as Parameters[2]['env'], + }, + ) + mock.callsFake(() => { + fakePsqlProcess?.start() + return fakePsqlProcess + }) + + const promise = psql.execFile(bastionDb, 'test.sql') + await fakePsqlProcess?.waitForStart() + mock.verify() + expect(tunnelStub.withArgs(tunnelConf).calledOnce).to.equal(true) + await fakePsqlProcess?.simulateExit(0) + await ensureFinished(promise) + }) + }) + + describe('psqlInteractive', () => { + const db = { + attachment: { + app: { + name: 'sleepy-hollow-9876', + }, + name: 'DATABASE', + }, + } as DB + + context('when 
HEROKU_PSQL_HISTORY is set', () => { + let historyPath: string + + function mockHerokuPSQLHistory(path: string) { + process.env.HEROKU_PSQL_HISTORY = path + } + + before(() => { + tmp.setGracefulCleanup() + }) + + afterEach(() => { + delete process.env.HEROKU_PSQL_HISTORY + }) + + context('when HEROKU_PSQL_HISTORY is a valid directory path', () => { + beforeEach(() => { + historyPath = tmp.dirSync().name + mockHerokuPSQLHistory(historyPath) + }) + + afterEach(() => { + fs.rmdirSync(historyPath) + }) + + it('is the directory path to per-app history files', async () => { + const expectedArgs = [ + '--set', + 'PROMPT1=sleepy-hollow-9876::DATABASE%R%# ', + '--set', + 'PROMPT2=sleepy-hollow-9876::DATABASE%R%# ', + '--set', + `HISTFILE=${historyPath}/sleepy-hollow-9876`, + '--set', + 'sslmode=require', + ] + + const expectedEnv = Object.freeze({ + PGAPPNAME: 'psql interactive', + PGSSLMODE: 'prefer', + }) + + const mock = mockSpawn( + 'psql', + expectedArgs, + { + stdio: 'inherit', + env: expectedEnv, + }, + ) + + mock.callsFake(() => { + fakePsqlProcess?.start() + return fakePsqlProcess + }) + + const promise = psql.interactive(db) + await fakePsqlProcess?.waitForStart() + await fakePsqlProcess?.simulateExit(0) + mock.verify() + const output = await ensureFinished(promise) + // psql interactive doesn't pipe output to the process + // ensure promise returned resolves with a promise anyway + expect(output).to.equal('') + }) + }) + + context('when HEROKU_PSQL_HISTORY is a valid file path', () => { + beforeEach(function () { + historyPath = tmp.fileSync().name + mockHerokuPSQLHistory(historyPath) + }) + + afterEach(() => { + fs.unlinkSync(historyPath) + }) + + it('is the path to the history file', async () => { + const expectedEnv = Object.freeze({ + PGAPPNAME: 'psql interactive', + PGSSLMODE: 'prefer', + }) + + const expectedArgs = [ + '--set', + 'PROMPT1=sleepy-hollow-9876::DATABASE%R%# ', + '--set', + 'PROMPT2=sleepy-hollow-9876::DATABASE%R%# ', + '--set', + `HISTFILE=${process.env.HEROKU_PSQL_HISTORY}`, + '--set', + 'sslmode=require', + ] + + const mock = mockSpawn( + 'psql', + expectedArgs, + { + stdio: 'inherit', + env: expectedEnv, + }, + ) + + mock.callsFake(() => { + fakePsqlProcess?.start() + return fakePsqlProcess + }) + + const promise = psql.interactive(db) + await fakePsqlProcess?.waitForStart() + await fakePsqlProcess?.simulateExit(0) + mock.verify() + const output = await ensureFinished(promise) + // psql interactive doesn't pipe output to the process + // ensure promise returned resolves with a promise anyway + expect(output).to.equal('') + }) + }) + + context('when HEROKU_PSQL_HISTORY is an invalid path', async () => { + it('issues a warning', async () => { + const invalidPath = path.join('/', 'path', 'to', 'history') + mockHerokuPSQLHistory(invalidPath) + + const cli = require('heroku-cli-util') + cli.mockConsole() + + const expectedEnv = Object.freeze({ + PGAPPNAME: 'psql interactive', + PGSSLMODE: 'prefer', + }) + + const expectedArgs = [ + '--set', + 'PROMPT1=sleepy-hollow-9876::DATABASE%R%# ', + '--set', + 'PROMPT2=sleepy-hollow-9876::DATABASE%R%# ', + '--set', + 'sslmode=require', + ] + + const mock = mockSpawn( + 'psql', + expectedArgs, + { + stdio: 'inherit', + env: expectedEnv, + }, + ) + + mock.callsFake(() => { + fakePsqlProcess?.start() + return fakePsqlProcess + }) + + const promise = psql.interactive(db) + await fakePsqlProcess?.waitForStart() + await fakePsqlProcess?.simulateExit(0) + mock.verify() + const expectedMessage = `HEROKU_PSQL_HISTORY is set but is not a 
valid path (${invalidPath})\n` + + await ensureFinished(promise) + expect(unwrap(cli.stderr)).to.equal(expectedMessage) + }) + }) + }) + }) +}) + +function isSinonMatcher(value: unknown): value is {test: CallableFunction} { + return Boolean(value && typeof value === 'object' && 'test' in value && typeof (value as {test: unknown}).test === 'function') +} + +// create a sinon matcher that only asserts on ENV values we expect. +// we don't want to leak other ENV variables to the console in CI. +// it also makes the test output easier by not listing all the environment variables available in process.env +function matchEnv(expectedEnv: NodeJS.ProcessEnv) { + const matcher = (actualEnv: NodeJS.ProcessEnv) => { + const reducedActualEnv = Object.entries(expectedEnv).reduce((memo, [key, value]) => { + if (key in actualEnv) { + memo[key] = value as string + } + + return memo + }, {} as Record) + sinon.match(expectedEnv).test(reducedActualEnv) + + return true + } + + return sinon.match(matcher, 'env contains expected keys and values') +} + +class FakeChildProcess extends EventEmitter { + public ready = false + public exited = false + public killed = false + public stdout = new PassThrough() + private _killedWithSignal: keyof SignalConstants | undefined + + async waitForStart() { + if (!this.ready) { + await once(this, 'ready') + } + } + + start() { + this.ready = true + this.emit('ready') + } + + async simulateExit(code: number): Promise { + if (!this.exited) { + return new Promise(resolve => { + this.exited = true + this.stdout.end() + process.nextTick(() => { + try { + this.emit('close', code) + } finally { + resolve() + } + }) + }) + } + } + + kill(signal: keyof SignalConstants): void { + this.killed = true + this._killedWithSignal = signal + const killedWithCode = constants.signals[signal] + this.simulateExit(killedWithCode) + } + + get killedWithSignal() { + return this._killedWithSignal + } + + async teardown() { + await this.simulateExit(0) + this.removeAllListeners() + } +} + +class TunnelStub extends EventEmitter { + public exited = false + + close() { + this.exited = true + process.nextTick(() => { + this.emit('close') + }) + } +} + +function createSpawnMocker(sandbox: sinon.SinonSandbox): (commandName: string, expectedArgs: string[], expectedOptions: SpawnOptions & {env: NodeJS.ProcessEnv}) => SinonExpectation { + return function (commandName: string, expectedArgs: string[], expectedOptions: SpawnOptions & {env: NodeJS.ProcessEnv}) { + const spawnMock = sandbox.mock(ChildProcess) + const {env: expectedEnv} = expectedOptions + + let optionsMatchers + if (isSinonMatcher(expectedOptions)) { + optionsMatchers = expectedOptions + } else { + optionsMatchers = Object.entries(expectedOptions).reduce((memo, [key, value]) => { + let matcher + if (key === 'env') { + matcher = matchEnv(expectedEnv) + } else { + matcher = value + } + + memo[key] = matcher + return memo + }, {} as Record) + } + + return spawnMock + .expects('spawn') + .withArgs( + commandName, + sinon.match.array.deepEquals(expectedArgs), + sinon.match(optionsMatchers), + ) + } +} diff --git a/packages/cli/test/unit/lib/utils/multisort.unit.test.ts b/packages/cli/test/unit/lib/utils/multisort.unit.test.ts new file mode 100644 index 0000000000..d853efcec8 --- /dev/null +++ b/packages/cli/test/unit/lib/utils/multisort.unit.test.ts @@ -0,0 +1,30 @@ +import {expect} from '@oclif/test' +import {multiSortCompareFn, type Comparator} from '../../../../src/lib/utils/multisort' + +describe('MultiSort', () => { + it('sorts based on precedence', () 
=> { + type User = { + firstName: string + lastName: string + } + const comparators: Comparator[] = [ + (a: User, b: User) => a.firstName.localeCompare(b.firstName), + (a: User, b: User) => a.lastName.localeCompare(b.lastName), + ] + + const users: User[] = [ + {firstName: 'Jill', lastName: 'Kemp'}, + {firstName: 'Bill', lastName: 'Stevens'}, + {firstName: 'Jill', lastName: 'Ames'}, + {firstName: 'Bill', lastName: 'Bernard'}, + ] + const shouldBe: User[] = [ + {firstName: 'Bill', lastName: 'Bernard'}, + {firstName: 'Bill', lastName: 'Stevens'}, + {firstName: 'Jill', lastName: 'Ames'}, + {firstName: 'Jill', lastName: 'Kemp'}, + ] + users.sort(multiSortCompareFn(comparators)) // Bill Bernard, Bill Stevens, Jill Ames, Jill Kemp + expect(users).to.deep.eq(shouldBe) + }) +}) diff --git a/packages/pg-v5/index.js b/packages/pg-v5/index.js index f4a52281d8..1c19101d59 100644 --- a/packages/pg-v5/index.js +++ b/packages/pg-v5/index.js @@ -9,7 +9,6 @@ exports.topics = [ exports.commands = flatten([ require('./commands/backups/cancel'), require('./commands/backups/capture'), - require('./commands/bloat'), require('./commands/blocking'), require('./commands/connection_pooling'), require('./commands/copy'), diff --git a/yarn.lock b/yarn.lock index 66438132dd..a450b1b682 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5404,6 +5404,13 @@ __metadata: languageName: node linkType: hard +"@types/tmp@npm:^0.2.6": + version: 0.2.6 + resolution: "@types/tmp@npm:0.2.6" + checksum: 0b24bb6040cc289440a609e10ec99a704978c890a5828ff151576489090b2257ce2e2570b0f320ace9c8099c3642ea6221fbdf6d8f2e22b7cd1f4fbf6e989e3e + languageName: node + linkType: hard + "@types/tough-cookie@npm:*": version: 2.3.5 resolution: "@types/tough-cookie@npm:2.3.5" @@ -5411,6 +5418,15 @@ __metadata: languageName: node linkType: hard +"@types/tunnel-ssh@npm:4.1.1": + version: 4.1.1 + resolution: "@types/tunnel-ssh@npm:4.1.1" + dependencies: + "@types/node": "*" + checksum: 25beaca18691d83fbf115af0403788b6637e8034538d0b0f1720f05d116ccb4238cb0a22555a263bc5a766102dac1087f360ac6d054b838c6486fe27e11780f8 + languageName: node + linkType: hard + "@types/urijs@npm:^1.19.4": version: 1.19.4 resolution: "@types/urijs@npm:1.19.4" @@ -10956,6 +10972,8 @@ __metadata: "@types/std-mocks": ^1.0.4 "@types/strftime": ^0.9.8 "@types/supports-color": ^5.3.0 + "@types/tmp": ^0.2.6 + "@types/tunnel-ssh": 4.1.1 "@types/urijs": ^1.19.4 "@types/uuid": ^8.3.0 "@types/validator": ^10.9.0 @@ -11015,11 +11033,12 @@ __metadata: strftime: ^0.10.0 strip-ansi: 6.0.1 term-img: ^4.1.0 - tmp: ^0.0.33 + tmp: ^0.2.3 true-myth: 2.2.3 ts-node: ^10.9.1 tsheredoc: ^1.0.1 tslib: 1.14.1 + tunnel-ssh: 4.1.6 typescript: 4.8.4 urijs: ^1.19.11 uuid: 3.3.2 @@ -18115,6 +18134,13 @@ __metadata: languageName: node linkType: hard +"tmp@npm:^0.2.3": + version: 0.2.3 + resolution: "tmp@npm:0.2.3" + checksum: 73b5c96b6e52da7e104d9d44afb5d106bb1e16d9fa7d00dbeb9e6522e61b571fbdb165c756c62164be9a3bbe192b9b268c236d370a2a0955c7689cd2ae377b95 + languageName: node + linkType: hard + "to-fast-properties@npm:^2.0.0": version: 2.0.0 resolution: "to-fast-properties@npm:2.0.0" @@ -18380,7 +18406,7 @@ __metadata: languageName: node linkType: hard -"tunnel-ssh@npm:^4.1.6": +"tunnel-ssh@npm:4.1.6, tunnel-ssh@npm:^4.1.6": version: 4.1.6 resolution: "tunnel-ssh@npm:4.1.6" dependencies: