diff --git a/.changeset/thirty-hairs-thank.md b/.changeset/thirty-hairs-thank.md new file mode 100644 index 00000000..5226a7bd --- /dev/null +++ b/.changeset/thirty-hairs-thank.md @@ -0,0 +1,6 @@ +--- +'@transloadit/node': patch +'@transloadit/mcp-server': patch +--- + +Add home-credentials CLI support and release the MCP server alongside the updated Node package. diff --git a/docs/fingerprint/transloadit-baseline.json b/docs/fingerprint/transloadit-baseline.json index e044cb61..3d830488 100644 --- a/docs/fingerprint/transloadit-baseline.json +++ b/docs/fingerprint/transloadit-baseline.json @@ -1,13 +1,13 @@ { "packageDir": "packages/transloadit", "tarball": { - "filename": "transloadit-4.7.7.tgz", - "sizeBytes": 964327, - "sha256": "7b6156e32e95689084a3a00feef03339da945eaaad092fe25409dc61bf38ab8f" + "filename": "transloadit-4.8.0.tgz", + "sizeBytes": 970751, + "sha256": "b8fa380fdbfb8bacb09c0ec3ca704e2701b6f42ca840721e5f4600d1f264bc3c" }, "packageJson": { "name": "transloadit", - "version": "4.7.7", + "version": "4.8.0", "main": "./dist/Transloadit.js", "exports": { ".": "./dist/Transloadit.js", @@ -133,8 +133,8 @@ }, { "path": "dist/cli/commands/auth.js", - "sizeBytes": 11611, - "sha256": "f97edb26a829382471ba905d5004ac41caa7c0bca2a3a65b6adf6ff969803063" + "sizeBytes": 12418, + "sha256": "ee234995a4629d50586f7f92a8e324fa7d24f35ab96b95d32756f6f5f241be7e" }, { "path": "dist/alphalib/types/robots/azure-import.js", @@ -158,13 +158,13 @@ }, { "path": "dist/cli/commands/BaseCommand.js", - "sizeBytes": 1883, - "sha256": "af4d62f394df456f5137627e8c4f241344fbe7c7796e427bc6ec493de5984762" + "sizeBytes": 1886, + "sha256": "86179e1558f82f7863804face3aab70f3ccb492d395144117688eb29bb13ef79" }, { "path": "dist/bearerToken.js", - "sizeBytes": 5587, - "sha256": "bc4721aef6564fd8ffd8d7f7f74528ec2194a514c14bb382855215d0b9baa102" + "sizeBytes": 5732, + "sha256": "1fe9a30b6b737c36576a87c61e9b86c008d1a7d109341055fcfbc59aaee6e1b2" }, { "path": "dist/alphalib/types/bill.js", @@ -193,8 
+193,8 @@ }, { "path": "dist/cli.js", - "sizeBytes": 1219, - "sha256": "b959001b789f7ebd47577de52ea067ea8115a2710b678f53bd479fadaba75aed" + "sizeBytes": 1316, + "sha256": "5d8de17e13f8d7b17cd1915be6deafccf62270b47a92fab879e2ea3314530e67" }, { "path": "dist/alphalib/types/robots/cloudfiles-import.js", @@ -353,8 +353,8 @@ }, { "path": "dist/cli/generateIntentDocs.js", - "sizeBytes": 12162, - "sha256": "88b738652c7b0ff7d37d3d8977d5ba5fc4ee291862be8b5ebbf25fa2e50f67c2" + "sizeBytes": 12474, + "sha256": "889889578951ca1a9b96f96a663fda853ddbff46af6122ac7b62caf8fbc158b7" }, { "path": "dist/alphalib/types/robots/google-import.js", @@ -368,8 +368,8 @@ }, { "path": "dist/cli/helpers.js", - "sizeBytes": 2625, - "sha256": "8555420c31526195f54151c481bd00d2a1fcf67dacbad3f988a4c6538fa67f21" + "sizeBytes": 10485, + "sha256": "24ba65d101c7a29c90769a397d5fa17bc4bac6f3d472d046867c591cb57a42dc" }, { "path": "dist/alphalib/types/robots/html-convert.js", @@ -834,7 +834,7 @@ { "path": "package.json", "sizeBytes": 2855, - "sha256": "5b92b07c9bd1125cd9bcc983de69e2be6fcc364d552479963b64adb3eb9e8148" + "sha256": "c19e4cc06170dad1e63a97d2e8abb0d61ad1549ba1ceb1f0b9bbd0b3ef47f587" }, { "path": "dist/alphalib/types/robots/_index.d.ts.map", @@ -1058,13 +1058,13 @@ }, { "path": "dist/cli/commands/auth.d.ts.map", - "sizeBytes": 922, - "sha256": "5723d078eacd9e3aa713aa28aa0b284f4c6a5f5b50203f39967a196542cf333f" + "sizeBytes": 924, + "sha256": "4415e108e361d5d7ddbf598ed32a5f58ad73f9c606ba20c801288575c0c53ebd" }, { "path": "dist/cli/commands/auth.js.map", - "sizeBytes": 10384, - "sha256": "3ea5009e5c9989b33694c725a9d11aa44f32b9b6c6dd73dc6b145d2318c8ce30" + "sizeBytes": 10744, + "sha256": "75823f4c7f60b9044bc1a0812993f15a3c1c3b25ad74f1a3f8c146b1c444153f" }, { "path": "dist/alphalib/types/robots/azure-import.d.ts.map", @@ -1108,23 +1108,23 @@ }, { "path": "dist/cli/commands/BaseCommand.d.ts.map", - "sizeBytes": 854, - "sha256": "7795327eb88bff243fbadaa473286c7d6c4bb24e4d0b11d7d7ff314dc9460112" + 
"sizeBytes": 833, + "sha256": "b0950ba79b15c784683370396137d92b5fac54dc480684e9aff833a8bb89ad27" }, { "path": "dist/cli/commands/BaseCommand.js.map", - "sizeBytes": 1771, - "sha256": "ea85d352f6620d0688afa1bb3c82f2c9c09b3bec6c06bfcc29d6d9f14f12578a" + "sizeBytes": 1740, + "sha256": "74855d31a36c89ede8a12c76a02e78038540cada20c2d5fdaa0d4d808a7d7648" }, { "path": "dist/bearerToken.d.ts.map", - "sizeBytes": 867, - "sha256": "bf66bc5231caabe6dea8993dacce769dbd5ebf6884d921d133dd461ab161a0ae" + "sizeBytes": 900, + "sha256": "c10732e07a2bbb0d47d9d40a0d755716384033e9b6839a0ec189fe6fb7e6aaab" }, { "path": "dist/bearerToken.js.map", - "sizeBytes": 5767, - "sha256": "0bf3817570b3aad5e23f507898ba8bce442d99355f94a0f9ffb7a4650bdd87b6" + "sizeBytes": 5867, + "sha256": "b584405efd14cd4e22920d6be5c66a21e2cb62352a17760bc8225918a98445d9" }, { "path": "dist/alphalib/types/bill.d.ts.map", @@ -1178,13 +1178,13 @@ }, { "path": "dist/cli.d.ts.map", - "sizeBytes": 293, - "sha256": "a9194c2c071b9b11546084324533c30a9188733778b0318be50f6a0f1917b6ae" + "sizeBytes": 273, + "sha256": "5735702149979dd281b9e657837c05397b14eabb50a78ecc9393e38a88da2471" }, { "path": "dist/cli.js.map", - "sizeBytes": 1408, - "sha256": "00a1c4a99a63ed2b06d9529979d476ffacc8594bad5d891e6a2245fabd0fdfea" + "sizeBytes": 1475, + "sha256": "afc2fcb54a541f3d923dcec829e8cd1955484a13c1d1f091e40c2027f6a6fc56" }, { "path": "dist/alphalib/types/robots/cloudfiles-import.d.ts.map", @@ -1499,12 +1499,12 @@ { "path": "dist/cli/generateIntentDocs.d.ts.map", "sizeBytes": 322, - "sha256": "fef7c62b2f31b146d88600a55ae2203411a1cf3e8919754ad9d643090b2b4508" + "sha256": "7a4fb5ab59c03c1fc20068db1d99c6c03efff5e7ffb1e8677bd6e7110d9456e4" }, { "path": "dist/cli/generateIntentDocs.js.map", - "sizeBytes": 10673, - "sha256": "0babc761e5669099b7df8af96ccb838ccc20501fe13bae2243c58b681ed3044c" + "sizeBytes": 10699, + "sha256": "bce7994f7bc8d8013de8cfc59fb9a97f87790932b28ae10b3aeeee661d2761e0" }, { "path": 
"dist/alphalib/types/robots/google-import.d.ts.map", @@ -1528,13 +1528,13 @@ }, { "path": "dist/cli/helpers.d.ts.map", - "sizeBytes": 1177, - "sha256": "0f65f407f35fe35bc23481feaf7146d051d3946557ae80008228844d563e41d5" + "sizeBytes": 1649, + "sha256": "2e1cb4c1c8921a53d6a90c20462aa5748709360b9ef7abf7558e557ca4eebd9f" }, { "path": "dist/cli/helpers.js.map", - "sizeBytes": 3067, - "sha256": "d644704a139a2ac26a8c7a2ba27b7eee939aecd8506d437ed616cc764eabe8c2" + "sizeBytes": 9948, + "sha256": "ee71fd0b983f8ef82a803afa8f0cbd195d18a0966d49916bb3f781b5b3a233c0" }, { "path": "dist/alphalib/types/robots/html-convert.d.ts.map", @@ -2458,8 +2458,8 @@ }, { "path": "README.md", - "sizeBytes": 84309, - "sha256": "6ab0c4d3904d56eb139e9bf290a45591a3428cb2363faf186a27a1bbcdb810d4" + "sizeBytes": 86005, + "sha256": "fb997dd8b6b5731339b5b710d769893e1ba1ace90d66d34d059f027c74e24b63" }, { "path": "dist/alphalib/types/robots/_index.d.ts", @@ -2688,8 +2688,8 @@ }, { "path": "src/cli/commands/auth.ts", - "sizeBytes": 12042, - "sha256": "c2d57b7a2b0cff9a4bcecf8ed8b8c52af664b9764d8a8248630c822f121f9a90" + "sizeBytes": 13156, + "sha256": "adfd11deb41f1bc9b0c42dcbde4e7542b97f515a6df7e05293e3839c7bab7ff8" }, { "path": "dist/alphalib/types/robots/azure-import.d.ts", @@ -2733,23 +2733,23 @@ }, { "path": "dist/cli/commands/BaseCommand.d.ts", - "sizeBytes": 944, - "sha256": "9f132cb73d644f225f0efa48ab02f0edc6a82c9db7db080e84ae2281d9cf845c" + "sizeBytes": 920, + "sha256": "9604da61ca909755a0c6574305e9e7f35150f7e0e8df47291877a28cf3cf3223" }, { "path": "src/cli/commands/BaseCommand.ts", - "sizeBytes": 2101, - "sha256": "8716f8a22898d35c025986a31a9234b43a8eaed09f7120b8f6424ff8d045fd50" + "sizeBytes": 2106, + "sha256": "0b3f59529a584dc382c4e21f9dafc5cd8b6b5a8d297221cbec2bfe479bbed344" }, { "path": "dist/bearerToken.d.ts", - "sizeBytes": 880, - "sha256": "9e4a6e4dff6fc60004e9d6d5d5637c0257345ca97b48ffd6225417a4eda94c80" + "sizeBytes": 927, + "sha256": 
"4d9b6cc1958365ef0377d1350b19522de395cd35f36f319527d0b3cd8adb0e38" }, { "path": "src/bearerToken.ts", - "sizeBytes": 5998, - "sha256": "528a38e470b6730ab073de1e845ec964855ffabfe862a4bca65af021732c4f1f" + "sizeBytes": 6207, + "sha256": "b9dbceaadeed8fd59aa1cf16db6e062a377c4a9256c9f6308488e0d173965646" }, { "path": "dist/alphalib/types/bill.d.ts", @@ -2803,13 +2803,13 @@ }, { "path": "dist/cli.d.ts", - "sizeBytes": 265, - "sha256": "84c403d5b19a2a87189fdf87a6a3b9d4f9dc23ff497f55ebacce6b72669adf8e" + "sizeBytes": 241, + "sha256": "b9aed044f9844b1140903619e189d41cb34ab6c72d457332e0b2f49936f43502" }, { "path": "src/cli.ts", - "sizeBytes": 1170, - "sha256": "757c3922b27c1d9c7fb2a496a66be1af298ed86b3e492fed6f43f7f08db1c8e0" + "sizeBytes": 1262, + "sha256": "dd56259d35c8072704dac8fe098275fa27bb72e0c22eea393b48cec4d05a816d" }, { "path": "dist/alphalib/types/robots/cloudfiles-import.d.ts", @@ -3128,8 +3128,8 @@ }, { "path": "src/cli/generateIntentDocs.ts", - "sizeBytes": 12525, - "sha256": "24ab29b2a991eaa39fb5af54d477a4a4b998dca3b286bbae86a71ebff461d933" + "sizeBytes": 12829, + "sha256": "482795d80dfa72b6ad61fb785097b347f7a3bc557646842ea8a738e26e07dfc8" }, { "path": "dist/alphalib/types/robots/google-import.d.ts", @@ -3153,13 +3153,13 @@ }, { "path": "dist/cli/helpers.d.ts", - "sizeBytes": 1073, - "sha256": "aeed9d1c1186c561cd846c905bac2d9738e88c80e5527c125f47ad6b332d10ee" + "sizeBytes": 1663, + "sha256": "15bb0db74f1067923856eb6f9ea4eca451a7ef90dffdef50cf5bf9120a64368f" }, { "path": "src/cli/helpers.ts", - "sizeBytes": 3340, - "sha256": "9741aa20b83f837889d248d5b095e6ec2336186bc5ab2b6caa23174950562919" + "sizeBytes": 11714, + "sha256": "2b9139aa13729a0f4c71062d68fca0872213f31fa901c5032695be2dd4783373" }, { "path": "dist/alphalib/types/robots/html-convert.d.ts", diff --git a/docs/fingerprint/transloadit-baseline.package.json b/docs/fingerprint/transloadit-baseline.package.json index b80e69d3..8504af30 100644 --- a/docs/fingerprint/transloadit-baseline.package.json +++ 
b/docs/fingerprint/transloadit-baseline.package.json @@ -1,6 +1,6 @@ { "name": "transloadit", - "version": "4.7.7", + "version": "4.8.0", "description": "Node.js SDK for Transloadit", "homepage": "https://github.com/transloadit/node-sdk/tree/main/packages/node", "bugs": { diff --git a/packages/node/README.md b/packages/node/README.md index c76c3390..347ffb2b 100644 --- a/packages/node/README.md +++ b/packages/node/README.md @@ -51,22 +51,66 @@ This package includes a full-featured CLI for interacting with Transloadit from ### Quick Start +Pick one auth setup and then run the CLI. + +Use shell env vars: + ```bash -# Set your credentials export TRANSLOADIT_KEY="YOUR_TRANSLOADIT_KEY" export TRANSLOADIT_SECRET="YOUR_TRANSLOADIT_SECRET" +``` + +Or create a reusable home credentials file: + +```bash +mkdir -p ~/.transloadit +cat > ~/.transloadit/credentials <<'EOF' +TRANSLOADIT_KEY="YOUR_TRANSLOADIT_KEY" +TRANSLOADIT_SECRET="YOUR_TRANSLOADIT_SECRET" +EOF +chmod 600 ~/.transloadit/credentials +``` + +Then see all available commands: -# See all available commands +```bash npx -y @transloadit/node --help ``` The CLI binary is still called `transloadit`, so command examples below may use `npx transloadit ...`. +### Credential Resolution + +The CLI resolves authentication in this order: + +1. Shell environment variables such as `TRANSLOADIT_KEY`, `TRANSLOADIT_SECRET`, and `TRANSLOADIT_AUTH_TOKEN` +2. The current working directory `.env` +3. `~/.transloadit/credentials` + +The home credentials file uses dotenv syntax. It is meant for user-level CLI use, so Intents and +other commands work from any directory on your machine without having to export credentials first. 
+ +Example `~/.transloadit/credentials`: + +```env +TRANSLOADIT_KEY="YOUR_TRANSLOADIT_KEY" +TRANSLOADIT_SECRET="YOUR_TRANSLOADIT_SECRET" +# Optional: +# TRANSLOADIT_ENDPOINT="https://api2.transloadit.com" +# TRANSLOADIT_AUTH_TOKEN="YOUR_BEARER_TOKEN" +``` + +If you want to use a different path, set `TRANSLOADIT_CREDENTIALS_FILE=/abs/path/to/credentials.env`. + +Most commands can authenticate with either `TRANSLOADIT_AUTH_TOKEN` or `TRANSLOADIT_KEY` + +`TRANSLOADIT_SECRET`. Commands that mint bearer tokens or generate signatures still require +`TRANSLOADIT_KEY` and `TRANSLOADIT_SECRET`. + ### Minting Bearer Tokens (Hosted MCP) If you want to connect an agent to the Transloadit-hosted MCP endpoint, mint a short-lived bearer -token via `POST /token`: +token via `POST /token`. This command also uses the same credential resolution order above: ```bash # Prints JSON to stdout (stderr may include npx/npm noise) @@ -93,6 +137,8 @@ The full generated intent reference also lives in [`docs/intent-commands.md`](./ #### At a glance Intent commands are the fastest path to common one-off tasks from the CLI. +Authentication is resolved in this order: shell environment, the current working directory `.env`, then `~/.transloadit/credentials`. +The home credentials file uses dotenv syntax and can include `TRANSLOADIT_KEY`, `TRANSLOADIT_SECRET`, `TRANSLOADIT_ENDPOINT`, and `TRANSLOADIT_AUTH_TOKEN`. Use `--print-urls` when you want temporary result URLs without downloading locally. All intent commands also support the global CLI flags `--json`, `--log-level`, `--endpoint`, and `--help`. @@ -1762,3 +1808,5 @@ See [CONTRIBUTING](./CONTRIBUTING.md). + + diff --git a/packages/node/docs/intent-commands.md b/packages/node/docs/intent-commands.md index 5a0e4a5f..a7ace25a 100644 --- a/packages/node/docs/intent-commands.md +++ b/packages/node/docs/intent-commands.md @@ -5,6 +5,8 @@ ## At a glance Intent commands are the fastest path to common one-off tasks from the CLI. 
+Authentication is resolved in this order: shell environment, the current working directory `.env`, then `~/.transloadit/credentials`. +The home credentials file uses dotenv syntax and can include `TRANSLOADIT_KEY`, `TRANSLOADIT_SECRET`, `TRANSLOADIT_ENDPOINT`, and `TRANSLOADIT_AUTH_TOKEN`. Use `--print-urls` when you want temporary result URLs without downloading locally. All intent commands also support the global CLI flags `--json`, `--log-level`, `--endpoint`, and `--help`. diff --git a/packages/node/src/bearerToken.ts b/packages/node/src/bearerToken.ts index a11126ec..956caba0 100644 --- a/packages/node/src/bearerToken.ts +++ b/packages/node/src/bearerToken.ts @@ -10,6 +10,7 @@ export type BearerTokenResponse = { } export type MintBearerTokenOptions = { + allowProcessEnvEndpointFallback?: boolean endpoint?: string aud?: BearerTokenAudience | string /** @@ -49,8 +50,15 @@ const isLoopbackHost = (hostname: string): boolean => type TokenBaseResult = { ok: true; baseUrl: URL } | { ok: false; error: string } -const normalizeTokenBaseEndpoint = (raw?: string): TokenBaseResult => { - const baseRaw = (raw || process.env.TRANSLOADIT_ENDPOINT || 'https://api2.transloadit.com').trim() +const normalizeTokenBaseEndpoint = ( + raw?: string, + allowProcessEnvEndpointFallback = true, +): TokenBaseResult => { + const baseRaw = ( + raw || + (allowProcessEnvEndpointFallback ? 
process.env.TRANSLOADIT_ENDPOINT : undefined) || + 'https://api2.transloadit.com' + ).trim() let url: URL try { @@ -118,7 +126,10 @@ export async function mintBearerTokenWithCredentials( credentials: { authKey: string; authSecret: string }, options: MintBearerTokenOptions = {}, ): Promise { - const endpointResult = normalizeTokenBaseEndpoint(options.endpoint) + const endpointResult = normalizeTokenBaseEndpoint( + options.endpoint, + options.allowProcessEnvEndpointFallback, + ) if (!endpointResult.ok) { return { ok: false, error: endpointResult.error } } diff --git a/packages/node/src/cli.ts b/packages/node/src/cli.ts index d7eedc1e..d1b73197 100644 --- a/packages/node/src/cli.ts +++ b/packages/node/src/cli.ts @@ -4,8 +4,7 @@ import { realpathSync } from 'node:fs' import path from 'node:path' import process from 'node:process' import { fileURLToPath } from 'node:url' -import 'dotenv/config' -import { createCli } from './cli/commands/index.ts' +import { loadProjectDotenvIntoProcessEnv } from './cli/helpers.ts' import { ensureError } from './cli/types.ts' const currentFile = realpathSync(fileURLToPath(import.meta.url)) @@ -26,6 +25,8 @@ export function shouldRunCli(invoked?: string): boolean { } export async function main(args = process.argv.slice(2)): Promise { + loadProjectDotenvIntoProcessEnv() + const { createCli } = await import('./cli/commands/index.ts') const cli = createCli() const exitCode = await cli.run(args) if (exitCode !== 0) { diff --git a/packages/node/src/cli/commands/BaseCommand.ts b/packages/node/src/cli/commands/BaseCommand.ts index fee02d72..00af3e31 100644 --- a/packages/node/src/cli/commands/BaseCommand.ts +++ b/packages/node/src/cli/commands/BaseCommand.ts @@ -1,8 +1,6 @@ -import 'dotenv/config' -import process from 'node:process' import { Command, Option } from 'clipanion' import { Transloadit as TransloaditClient } from '../../Transloadit.ts' -import { requireEnvCredentials } from '../helpers.ts' +import { buildMissingAuthMessage, 
resolveCliConfig } from '../helpers.ts' import type { IOutputCtl } from '../OutputCtl.ts' import OutputCtl, { LOG_LEVEL_DEFAULT, LOG_LEVEL_NAMES, parseLogLevel } from '../OutputCtl.ts' @@ -17,7 +15,7 @@ abstract class BaseCommand extends Command { endpoint = Option.String('--endpoint', { description: - 'API endpoint URL (default: https://api2.transloadit.com, or TRANSLOADIT_ENDPOINT env var)', + 'API endpoint URL (default: https://api2.transloadit.com, or TRANSLOADIT_ENDPOINT from the environment, .env, or ~/.transloadit/credentials)', }) protected output!: IOutputCtl @@ -32,16 +30,16 @@ abstract class BaseCommand extends Command { } protected setupClient(): boolean { - const credsResult = requireEnvCredentials() - if (!credsResult.ok) { - this.output.error(credsResult.error) + const config = resolveCliConfig() + if (config.auth == null) { + this.output.error(config.loadError ?? buildMissingAuthMessage()) return false } - const endpoint = this.endpoint || process.env.TRANSLOADIT_ENDPOINT + const endpoint = this.endpoint || config.endpoint this.client = new TransloaditClient({ - ...credsResult.credentials, + ...config.auth, ...(endpoint && { endpoint }), }) return true diff --git a/packages/node/src/cli/commands/auth.ts b/packages/node/src/cli/commands/auth.ts index e68187e9..2147f029 100644 --- a/packages/node/src/cli/commands/auth.ts +++ b/packages/node/src/cli/commands/auth.ts @@ -9,7 +9,14 @@ import { import type { OptionalAuthParams } from '../../apiTypes.ts' import { mintBearerTokenWithCredentials } from '../../bearerToken.ts' import { Transloadit } from '../../Transloadit.ts' -import { readCliInput, requireEnvCredentials } from '../helpers.ts' +import type { CliKeySecretCredentials, ResolvedCliConfig } from '../helpers.ts' +import { + buildMissingCredentialsMessage, + readCliInput, + requireCliCredentials, + resolveCliConfig, +} from '../helpers.ts' +import type { IOutputCtl } from '../OutputCtl.ts' import { UnauthenticatedCommand } from './BaseCommand.ts' 
type UrlParamPrimitive = string | number | boolean @@ -195,14 +202,49 @@ export interface RunSmartSigOptions { providedInput?: string } -export async function runSig(options: RunSigOptions = {}): Promise { - const credentialsResult = requireEnvCredentials() +function reportStandaloneCredentialsError(error: string): null { + console.error(error) + process.exitCode = 1 + return null +} + +function getStandaloneCredentials(): CliKeySecretCredentials | null { + const credentialsResult = requireCliCredentials() if (!credentialsResult.ok) { - console.error(credentialsResult.error) - process.exitCode = 1 + return reportStandaloneCredentialsError(credentialsResult.error) + } + + return credentialsResult.credentials +} + +function getCommandCredentials(output: IOutputCtl): CliKeySecretCredentials | null { + const credentialsResult = requireCliCredentials() + if (!credentialsResult.ok) { + output.error(credentialsResult.error) + return null + } + + return credentialsResult.credentials +} + +function getCommandConfigWithCredentials(output: IOutputCtl): { + config: ResolvedCliConfig + credentials: CliKeySecretCredentials +} | null { + const config = resolveCliConfig() + if (config.credentials == null) { + output.error(config.loadError ?? 
buildMissingCredentialsMessage()) + return null + } + + return { config, credentials: config.credentials } +} + +export async function runSig(options: RunSigOptions = {}): Promise { + const credentials = getStandaloneCredentials() + if (credentials == null) { return } - const credentials = credentialsResult.credentials const { content } = await readCliInput({ providedInput: options.providedInput, @@ -220,13 +262,10 @@ export async function runSig(options: RunSigOptions = {}): Promise { } export async function runSmartSig(options: RunSmartSigOptions = {}): Promise { - const credentialsResult = requireEnvCredentials() - if (!credentialsResult.ok) { - console.error(credentialsResult.error) - process.exitCode = 1 + const credentials = getStandaloneCredentials() + if (credentials == null) { return } - const credentials = credentialsResult.credentials const { content } = await readCliInput({ providedInput: options.providedInput, @@ -260,6 +299,8 @@ export class SignatureCommand extends UnauthenticatedCommand { details: ` Read params JSON from stdin and output signed payload JSON. If no input is provided, generates a signature with default params. + Credentials are resolved from the shell environment, the current working directory .env, or + ~/.transloadit/credentials. `, examples: [ ['Generate signature', 'echo \'{"steps":{}}\' | transloadit signature'], @@ -273,12 +314,8 @@ export class SignatureCommand extends UnauthenticatedCommand { }) protected async run(): Promise { - const credentialsResult = requireEnvCredentials() - if (!credentialsResult.ok) { - this.output.error(credentialsResult.error) - return 1 - } - const credentials = credentialsResult.credentials + const credentials = getCommandCredentials(this.output) + if (credentials == null) return 1 const { content } = await readCliInput({ allowStdinWhenNoPath: true }) const rawInput = (content ?? 
'').trim() @@ -312,6 +349,8 @@ export class SmartCdnSignatureCommand extends UnauthenticatedCommand { Read Smart CDN params JSON from stdin and output a signed URL. Required fields: workspace, template, input Optional fields: expire_at_ms, url_params + Credentials are resolved from the shell environment, the current working directory .env, or + ~/.transloadit/credentials. `, examples: [ [ @@ -326,12 +365,8 @@ export class SmartCdnSignatureCommand extends UnauthenticatedCommand { }) protected async run(): Promise { - const credentialsResult = requireEnvCredentials() - if (!credentialsResult.ok) { - this.output.error(credentialsResult.error) - return 1 - } - const credentials = credentialsResult.credentials + const credentials = getCommandCredentials(this.output) + if (credentials == null) return 1 const { content } = await readCliInput({ allowStdinWhenNoPath: true }) const rawInput = (content ?? '').trim() @@ -359,8 +394,9 @@ export class TokenCommand extends UnauthenticatedCommand { category: 'Auth', description: 'Mint a short-lived bearer token', details: ` - Calls POST /token using HTTP Basic Auth (TRANSLOADIT_KEY + TRANSLOADIT_SECRET) and prints the - JSON response to stdout. + Calls POST /token using HTTP Basic Auth and prints the JSON response to stdout. + Credentials are resolved from the shell environment, the current working directory .env, or + ~/.transloadit/credentials. 
`, examples: [ ['Mint an MCP token (default aud)', 'transloadit auth token'], @@ -378,14 +414,12 @@ export class TokenCommand extends UnauthenticatedCommand { }) protected override async run(): Promise { - const credentialsResult = requireEnvCredentials() - if (!credentialsResult.ok) { - this.output.error(credentialsResult.error) - return 1 - } + const resolved = getCommandConfigWithCredentials(this.output) + if (resolved == null) return 1 - const result = await mintBearerTokenWithCredentials(credentialsResult.credentials, { - endpoint: this.endpoint, + const result = await mintBearerTokenWithCredentials(resolved.credentials, { + allowProcessEnvEndpointFallback: false, + endpoint: this.endpoint ?? resolved.config.credentialsEndpoint, aud: this.aud, scope: this.scope, }) diff --git a/packages/node/src/cli/generateIntentDocs.ts b/packages/node/src/cli/generateIntentDocs.ts index 37d7fb0b..8c0b3ca6 100644 --- a/packages/node/src/cli/generateIntentDocs.ts +++ b/packages/node/src/cli/generateIntentDocs.ts @@ -342,6 +342,8 @@ export function renderIntentDocsBody({ `${heading} At a glance`, '', 'Intent commands are the fastest path to common one-off tasks from the CLI.', + 'Authentication is resolved in this order: shell environment, the current working directory `.env`, then `~/.transloadit/credentials`.', + 'The home credentials file uses dotenv syntax and can include `TRANSLOADIT_KEY`, `TRANSLOADIT_SECRET`, `TRANSLOADIT_ENDPOINT`, and `TRANSLOADIT_AUTH_TOKEN`.', 'Use `--print-urls` when you want temporary result URLs without downloading locally.', 'All intent commands also support the global CLI flags `--json`, `--log-level`, `--endpoint`, and `--help`.', '', diff --git a/packages/node/src/cli/helpers.ts b/packages/node/src/cli/helpers.ts index b0eb40cb..e9fa0670 100644 --- a/packages/node/src/cli/helpers.ts +++ b/packages/node/src/cli/helpers.ts @@ -1,30 +1,295 @@ import fs from 'node:fs' import fsp from 'node:fs/promises' +import { homedir } from 'node:os' +import 
path from 'node:path' import type { Readable } from 'node:stream' +import { parse as parseDotenv } from 'dotenv' import { isAPIError } from './types.ts' -const MISSING_CREDENTIALS_MESSAGE = - 'Missing credentials. Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.' +export type CliKeySecretCredentials = { authKey: string; authSecret: string } +export type CliAuthToken = { authToken: string } +export type CliAuth = CliKeySecretCredentials | CliAuthToken +type CliEnvSource = { + name: 'env' | 'credentialsFile' + values: Record +} + +let loadedProjectDotenvPath: string | undefined +let projectDotenvInjectedValues: Record | undefined +let projectDotenvPreviousValues: Record | undefined +let shellEnvBeforeProjectDotenv: Record | undefined + +type LoadCliEnvSourcesResult = { + loadError?: string + shellEnvSource: CliEnvSource + sources: CliEnvSource[] +} + +export type ResolvedCliConfig = { + auth?: CliAuth + credentials?: CliKeySecretCredentials + credentialsEndpoint?: string + endpoint?: string + loadError?: string +} + +function normalizeEnvValue(value: string | undefined): string | undefined { + const trimmed = value?.trim() + return trimmed ? 
trimmed : undefined +} + +function getConfiguredCredentialsFilePath(): string { + const configuredPath = normalizeEnvValue(process.env.TRANSLOADIT_CREDENTIALS_FILE) + if (configuredPath != null) { + return path.resolve(configuredPath) + } + + return path.join(homedir(), '.transloadit', 'credentials') +} + +function getProjectDotenvPath(): string { + return path.resolve(process.cwd(), '.env') +} + +function getDisplayPath(filePath: string): string { + const normalizedHome = path.resolve(homedir()) + const normalizedFilePath = path.resolve(filePath) + if (normalizedFilePath === normalizedHome) return '~' + if (normalizedFilePath.startsWith(`${normalizedHome}${path.sep}`)) { + return `~${normalizedFilePath.slice(normalizedHome.length)}` + } + + return normalizedFilePath +} + +export function buildMissingCredentialsMessage(): string { + return [ + 'Missing credentials.', + '', + 'Looked for TRANSLOADIT_KEY + TRANSLOADIT_SECRET in this order:', + '1. Shell env: TRANSLOADIT_KEY / TRANSLOADIT_SECRET', + `2. Current directory .env: ${getProjectDotenvPath()}`, + `3. Credentials file: ${getDisplayPath(getConfiguredCredentialsFilePath())}`, + ].join('\n') +} + +export function buildMissingAuthMessage(): string { + return [ + 'Missing authentication.', + '', + 'Looked for TRANSLOADIT_AUTH_TOKEN or TRANSLOADIT_KEY + TRANSLOADIT_SECRET in this order:', + '1. Shell env: TRANSLOADIT_AUTH_TOKEN, or TRANSLOADIT_KEY / TRANSLOADIT_SECRET', + `2. Current directory .env: ${getProjectDotenvPath()}`, + `3. 
Credentials file: ${getDisplayPath(getConfiguredCredentialsFilePath())}`, + ].join('\n') +} + +function readEnvFile( + filePath: string, +): { ok: true; source: CliEnvSource } | { ok: false; error: string } | null { + if (!fs.existsSync(filePath)) return null + + try { + return { + ok: true, + source: { + name: 'credentialsFile', + values: parseDotenv(fs.readFileSync(filePath)), + }, + } + } catch (err) { + if (!(err instanceof Error)) { + throw new Error(`Was thrown a non-error: ${err}`) + } + return { ok: false, error: `Failed to read ${filePath}: ${err.message}` } + } +} + +export function loadProjectDotenvIntoProcessEnv(): string | undefined { + const projectDotenvPath = getProjectDotenvPath() + if (loadedProjectDotenvPath !== projectDotenvPath) { + restoreProjectDotenvFromProcessEnv() + shellEnvBeforeProjectDotenv = { ...process.env } + loadedProjectDotenvPath = projectDotenvPath + } + + const projectDotenvResult = readEnvFile(projectDotenvPath) + if (projectDotenvResult == null) { + restoreProjectDotenvFromProcessEnv() + projectDotenvInjectedValues = undefined + projectDotenvPreviousValues = undefined + return undefined + } + + if (!projectDotenvResult.ok) return projectDotenvResult.error + if (projectDotenvInjectedValues != null) return undefined + + const previousValues: Record = {} + const injectedValues: Record = {} + for (const [key, value] of Object.entries(projectDotenvResult.source.values)) { + if (value == null) continue + if (normalizeEnvValue(process.env[key]) != null) continue + previousValues[key] = process.env[key] + process.env[key] = value + injectedValues[key] = value + } + + projectDotenvPreviousValues = previousValues + projectDotenvInjectedValues = injectedValues + return undefined +} + +function getShellEnvValues(): Record { + if (loadedProjectDotenvPath === getProjectDotenvPath() && shellEnvBeforeProjectDotenv != null) { + return shellEnvBeforeProjectDotenv + } + + return { ...process.env } +} + +function 
restoreProjectDotenvFromProcessEnv(): void { + if (projectDotenvInjectedValues == null || projectDotenvPreviousValues == null) return + + for (const [key, injectedValue] of Object.entries(projectDotenvInjectedValues)) { + if (process.env[key] !== injectedValue) continue + + const previousValue = projectDotenvPreviousValues[key] + if (previousValue == null) { + delete process.env[key] + continue + } + + process.env[key] = previousValue + } + + projectDotenvInjectedValues = undefined + projectDotenvPreviousValues = undefined +} + +function loadCliEnvSources(): LoadCliEnvSourcesResult { + const shellEnvSource: CliEnvSource = { + name: 'env', + values: getShellEnvValues(), + } + const loadErrors: string[] = [] + + const projectDotenvLoadError = loadProjectDotenvIntoProcessEnv() + if (projectDotenvLoadError != null) { + loadErrors.push(projectDotenvLoadError) + } + + const sources: CliEnvSource[] = [ + { + name: 'env', + values: { ...process.env }, + }, + ] + + const credentialsFilePath = getConfiguredCredentialsFilePath() + const credentialsFileResult = readEnvFile(credentialsFilePath) + if (credentialsFileResult?.ok === true) { + sources.push(credentialsFileResult.source) + } else if (credentialsFileResult?.ok === false) { + loadErrors.push(credentialsFileResult.error) + } else if (normalizeEnvValue(process.env.TRANSLOADIT_CREDENTIALS_FILE) != null) { + loadErrors.push(`Configured credentials file does not exist: ${credentialsFilePath}`) + } + + return { + shellEnvSource, + sources, + ...(loadErrors[0] ? { loadError: loadErrors[0] } : {}), + } +} -type EnvCredentials = { authKey: string; authSecret: string } +function getSourceValue(source: CliEnvSource, keys: string[]): string | undefined { + for (const key of keys) { + const value = normalizeEnvValue(source.values[key]) + if (value != null) return value + } -function getEnvCredentials(): { authKey: string; authSecret: string } | null { - const authKey = process.env.TRANSLOADIT_KEY ?? 
process.env.TRANSLOADIT_AUTH_KEY - const authSecret = process.env.TRANSLOADIT_SECRET ?? process.env.TRANSLOADIT_AUTH_SECRET + return undefined +} - if (!authKey || !authSecret) return null +function getSourceCredentials(source: CliEnvSource): CliKeySecretCredentials | undefined { + const authKey = getSourceValue(source, ['TRANSLOADIT_KEY', 'TRANSLOADIT_AUTH_KEY']) + const authSecret = getSourceValue(source, ['TRANSLOADIT_SECRET', 'TRANSLOADIT_AUTH_SECRET']) + if (authKey == null || authSecret == null) return undefined return { authKey, authSecret } } -type RequireEnvCredentialsResult = - | { ok: true; credentials: EnvCredentials } +function getSourceAuthToken(source: CliEnvSource): CliAuthToken | undefined { + const authToken = getSourceValue(source, ['TRANSLOADIT_AUTH_TOKEN']) + if (authToken == null) return undefined + + return { authToken } +} + +function resolveEndpointForSource( + source: CliEnvSource | undefined, + shellEnvSource: CliEnvSource, +): string | undefined { + const shellEndpoint = getSourceValue(shellEnvSource, ['TRANSLOADIT_ENDPOINT']) + if (shellEndpoint != null) return shellEndpoint + if (source == null) return undefined + + return getSourceValue(source, ['TRANSLOADIT_ENDPOINT']) +} + +export function resolveCliConfig(): ResolvedCliConfig { + const { loadError, shellEnvSource, sources } = loadCliEnvSources() + let auth: CliAuth | undefined + let authSource: CliEnvSource | undefined + let credentials: CliKeySecretCredentials | undefined + let credentialsSource: CliEnvSource | undefined + + for (const source of sources) { + if (auth == null) { + const authToken = getSourceAuthToken(source) + if (authToken != null) { + auth = authToken + authSource = source + } else { + const sourceCredentials = getSourceCredentials(source) + if (sourceCredentials != null) { + auth = sourceCredentials + authSource = source + } + } + } + + if (credentials != null) continue + + const sourceCredentials = getSourceCredentials(source) + if (sourceCredentials != null) { 
+ credentials = sourceCredentials + credentialsSource = source + } + } + + return { + ...(auth != null ? { auth } : {}), + ...(credentials != null ? { credentials } : {}), + ...(authSource != null + ? { endpoint: resolveEndpointForSource(authSource, shellEnvSource) } + : {}), + ...(credentialsSource != null + ? { credentialsEndpoint: resolveEndpointForSource(credentialsSource, shellEnvSource) } + : {}), + ...(loadError != null ? { loadError } : {}), + } +} + +type RequireCliCredentialsResult = + | { ok: true; credentials: CliKeySecretCredentials } | { ok: false; error: string } -export function requireEnvCredentials(): RequireEnvCredentialsResult { - const credentials = getEnvCredentials() - if (credentials == null) return { ok: false, error: MISSING_CREDENTIALS_MESSAGE } - return { ok: true, credentials } +export function requireCliCredentials(): RequireCliCredentialsResult { + const { credentials, loadError } = resolveCliConfig() + if (credentials != null) return { ok: true, credentials } + if (loadError != null) return { ok: false, error: loadError } + return { ok: false, error: buildMissingCredentialsMessage() } } export function createReadStream(file: string): Readable { diff --git a/packages/node/test/unit/cli/credentials-resolution.test.ts b/packages/node/test/unit/cli/credentials-resolution.test.ts new file mode 100644 index 00000000..2cc36696 --- /dev/null +++ b/packages/node/test/unit/cli/credentials-resolution.test.ts @@ -0,0 +1,418 @@ +import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from 'node:fs' +import { tmpdir } from 'node:os' +import path from 'node:path' +import { afterEach, describe, expect, it, vi } from 'vitest' +import { runSig } from '../../../src/cli/commands/auth.ts' +import OutputCtl from '../../../src/cli/OutputCtl.ts' +import { main } from '../../../src/cli.ts' +import { Transloadit } from '../../../src/Transloadit.ts' + +const originalCwd = process.cwd() + +const resetExitCode = () => { + process.exitCode = undefined +} + 
+function createCliFixture(): { + cleanup: () => void + credentialsFilePath: string + cwd: string + home: string + root: string +} { + const root = mkdtempSync(path.join(tmpdir(), 'transloadit-cli-auth-')) + const home = path.join(root, 'home') + const cwd = path.join(root, 'workspace') + const credentialsDir = path.join(home, '.transloadit') + const credentialsFilePath = path.join(credentialsDir, 'credentials') + + mkdirSync(credentialsDir, { recursive: true }) + mkdirSync(cwd, { recursive: true }) + + return { + root, + home, + cwd, + credentialsFilePath, + cleanup: () => { + rmSync(root, { recursive: true, force: true }) + }, + } +} + +afterEach(() => { + process.chdir(originalCwd) + vi.restoreAllMocks() + vi.unstubAllEnvs() + resetExitCode() +}) + +function clearAmbientTransloaditEnv(): void { + vi.stubEnv('TRANSLOADIT_KEY', '') + vi.stubEnv('TRANSLOADIT_SECRET', '') + vi.stubEnv('TRANSLOADIT_AUTH_KEY', '') + vi.stubEnv('TRANSLOADIT_AUTH_SECRET', '') + vi.stubEnv('TRANSLOADIT_AUTH_TOKEN', '') + vi.stubEnv('TRANSLOADIT_ENDPOINT', '') +} + +describe('cli credential resolution', () => { + it('uses ~/.transloadit/credentials when shell env and .env are absent', async () => { + const fixture = createCliFixture() + writeFileSync( + fixture.credentialsFilePath, + ['TRANSLOADIT_KEY=home-key', 'TRANSLOADIT_SECRET=home-secret'].join('\n'), + ) + + clearAmbientTransloaditEnv() + vi.stubEnv('TRANSLOADIT_CREDENTIALS_FILE', fixture.credentialsFilePath) + process.chdir(fixture.cwd) + + const stdoutSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + const stderrSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) + + try { + await runSig({ + providedInput: JSON.stringify({ auth: { expires: '2025-01-03T00:00:00.000Z' } }), + }) + + expect(stderrSpy).not.toHaveBeenCalled() + const output = JSON.parse(`${stdoutSpy.mock.calls[0]?.[0]}`.trim()) + const params = JSON.parse(output.params as string) + expect(params.auth?.key).toBe('home-key') + 
expect(process.exitCode).toBeUndefined() + } finally { + fixture.cleanup() + } + }) + + it('prefers the current working directory .env over ~/.transloadit/credentials', async () => { + const fixture = createCliFixture() + writeFileSync( + fixture.credentialsFilePath, + ['TRANSLOADIT_KEY=home-key', 'TRANSLOADIT_SECRET=home-secret'].join('\n'), + ) + writeFileSync( + path.join(fixture.cwd, '.env'), + ['TRANSLOADIT_KEY=dotenv-key', 'TRANSLOADIT_SECRET=dotenv-secret'].join('\n'), + ) + + clearAmbientTransloaditEnv() + vi.stubEnv('TRANSLOADIT_CREDENTIALS_FILE', fixture.credentialsFilePath) + process.chdir(fixture.cwd) + + const stdoutSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + vi.spyOn(console, 'error').mockImplementation(() => {}) + + try { + await runSig({ + providedInput: JSON.stringify({ auth: { expires: '2025-01-03T00:00:00.000Z' } }), + }) + + const output = JSON.parse(`${stdoutSpy.mock.calls[0]?.[0]}`.trim()) + const params = JSON.parse(output.params as string) + expect(params.auth?.key).toBe('dotenv-key') + } finally { + fixture.cleanup() + } + }) + + it('supports TRANSLOADIT_CREDENTIALS_FILE as an override', async () => { + const fixture = createCliFixture() + const explicitFilePath = path.join(fixture.root, 'custom.env') + writeFileSync( + explicitFilePath, + ['TRANSLOADIT_KEY=custom-key', 'TRANSLOADIT_SECRET=custom-secret'].join('\n'), + ) + + clearAmbientTransloaditEnv() + vi.stubEnv('TRANSLOADIT_CREDENTIALS_FILE', explicitFilePath) + process.chdir(fixture.cwd) + + const stdoutSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + vi.spyOn(console, 'error').mockImplementation(() => {}) + + try { + await runSig({ + providedInput: JSON.stringify({ auth: { expires: '2025-01-03T00:00:00.000Z' } }), + }) + + const output = JSON.parse(`${stdoutSpy.mock.calls[0]?.[0]}`.trim()) + const params = JSON.parse(output.params as string) + expect(params.auth?.key).toBe('custom-key') + } finally { + fixture.cleanup() + } + }) + 
+ it('uses TRANSLOADIT_AUTH_TOKEN from ~/.transloadit/credentials for authenticated commands', async () => { + const fixture = createCliFixture() + writeFileSync( + fixture.credentialsFilePath, + ['TRANSLOADIT_AUTH_TOKEN=home-token', 'TRANSLOADIT_ENDPOINT=https://api2.example.test'].join( + '\n', + ), + ) + + clearAmbientTransloaditEnv() + vi.stubEnv('TRANSLOADIT_CREDENTIALS_FILE', fixture.credentialsFilePath) + process.chdir(fixture.cwd) + + const listSpy = vi + .spyOn(Transloadit.prototype, 'listTemplates') + .mockImplementation(function () { + expect(Reflect.get(this, '_authToken')).toBe('home-token') + expect(Reflect.get(this, '_authKey')).toBe('') + expect(Reflect.get(this, '_endpoint')).toBe('https://api2.example.test') + return Promise.resolve({ items: [], count: 0 }) + }) + + vi.spyOn(OutputCtl.prototype, 'print').mockImplementation(() => {}) + vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + + try { + await main(['templates', 'list']) + + expect(listSpy).toHaveBeenCalled() + expect(process.exitCode).toBeUndefined() + } finally { + fixture.cleanup() + } + }) + + it('uses TRANSLOADIT_ENDPOINT from ~/.transloadit/credentials when minting tokens', async () => { + const fixture = createCliFixture() + writeFileSync( + fixture.credentialsFilePath, + [ + 'TRANSLOADIT_KEY=home-key', + 'TRANSLOADIT_SECRET=home-secret', + 'TRANSLOADIT_ENDPOINT=https://api2.example.test', + ].join('\n'), + ) + + clearAmbientTransloaditEnv() + vi.stubEnv('TRANSLOADIT_CREDENTIALS_FILE', fixture.credentialsFilePath) + process.chdir(fixture.cwd) + + const fetchSpy = vi.fn( + async () => + new Response(JSON.stringify({ access_token: 'abc', token_type: 'Bearer', expires_in: 1 }), { + status: 200, + headers: { 'content-type': 'application/json' }, + }), + ) + vi.stubGlobal('fetch', fetchSpy as unknown as typeof fetch) + vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + vi.spyOn(console, 'error').mockImplementation(() => {}) + + try { + await main(['auth', 
'token']) + + const [url] = fetchSpy.mock.calls[0] as [string, RequestInit] + expect(url).toBe('https://api2.example.test/token') + expect(process.exitCode).toBeUndefined() + } finally { + fixture.cleanup() + } + }) + + it('does not let the current working directory .env override the endpoint for home credentials', async () => { + const fixture = createCliFixture() + writeFileSync( + fixture.credentialsFilePath, + [ + 'TRANSLOADIT_KEY=home-key', + 'TRANSLOADIT_SECRET=home-secret', + 'TRANSLOADIT_ENDPOINT=https://api2.example.test', + ].join('\n'), + ) + writeFileSync( + path.join(fixture.cwd, '.env'), + 'TRANSLOADIT_ENDPOINT=https://attacker.example.test\n', + ) + + clearAmbientTransloaditEnv() + vi.stubEnv('TRANSLOADIT_CREDENTIALS_FILE', fixture.credentialsFilePath) + process.chdir(fixture.cwd) + + const fetchSpy = vi.fn( + async () => + new Response(JSON.stringify({ access_token: 'abc', token_type: 'Bearer', expires_in: 1 }), { + status: 200, + headers: { 'content-type': 'application/json' }, + }), + ) + vi.stubGlobal('fetch', fetchSpy as unknown as typeof fetch) + vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + vi.spyOn(console, 'error').mockImplementation(() => {}) + + try { + await main(['auth', 'token']) + + const [url] = fetchSpy.mock.calls[0] as [string, RequestInit] + expect(url).toBe('https://api2.example.test/token') + expect(process.exitCode).toBeUndefined() + } finally { + fixture.cleanup() + } + }) + + it('does not let the current working directory .env redirect home credentials when no home endpoint is set', async () => { + const fixture = createCliFixture() + writeFileSync( + fixture.credentialsFilePath, + ['TRANSLOADIT_KEY=home-key', 'TRANSLOADIT_SECRET=home-secret'].join('\n'), + ) + writeFileSync( + path.join(fixture.cwd, '.env'), + 'TRANSLOADIT_ENDPOINT=https://attacker.example.test\n', + ) + + clearAmbientTransloaditEnv() + vi.stubEnv('TRANSLOADIT_CREDENTIALS_FILE', fixture.credentialsFilePath) + process.chdir(fixture.cwd) + 
+ const fetchSpy = vi.fn( + async () => + new Response(JSON.stringify({ access_token: 'abc', token_type: 'Bearer', expires_in: 1 }), { + status: 200, + headers: { 'content-type': 'application/json' }, + }), + ) + vi.stubGlobal('fetch', fetchSpy as unknown as typeof fetch) + vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + vi.spyOn(console, 'error').mockImplementation(() => {}) + + try { + await main(['auth', 'token']) + + const [url] = fetchSpy.mock.calls[0] as [string, RequestInit] + expect(url).toBe('https://api2.transloadit.com/token') + expect(process.exitCode).toBeUndefined() + } finally { + fixture.cleanup() + } + }) + + it('loads the current working directory .env into process.env', async () => { + const fixture = createCliFixture() + writeFileSync( + path.join(fixture.cwd, '.env'), + ['DEBUG=transloadit*', 'TRANSLOADIT_AUTH_TOKEN=dotenv-token'].join('\n'), + ) + + clearAmbientTransloaditEnv() + vi.stubEnv('DEBUG', '') + process.chdir(fixture.cwd) + + const listSpy = vi.spyOn(Transloadit.prototype, 'listTemplates').mockResolvedValue({ + items: [], + count: 0, + }) + vi.spyOn(OutputCtl.prototype, 'print').mockImplementation(() => {}) + vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + + try { + await main(['templates', 'list']) + + expect(listSpy).toHaveBeenCalled() + expect(process.env.DEBUG).toBe('transloadit*') + expect(process.exitCode).toBeUndefined() + } finally { + fixture.cleanup() + } + }) + + it('does not reuse dotenv credentials after changing directories', async () => { + const firstFixture = createCliFixture() + const secondFixture = createCliFixture() + const emptyCredentialsFilePath = path.join(firstFixture.root, 'empty-credentials.env') + writeFileSync(path.join(firstFixture.cwd, '.env'), 'TRANSLOADIT_AUTH_TOKEN=dotenv-token\n') + writeFileSync(emptyCredentialsFilePath, '') + + clearAmbientTransloaditEnv() + vi.stubEnv('TRANSLOADIT_CREDENTIALS_FILE', emptyCredentialsFilePath) + + const listSpy = 
vi.spyOn(Transloadit.prototype, 'listTemplates').mockResolvedValue({ + items: [], + count: 0, + }) + vi.spyOn(OutputCtl.prototype, 'print').mockImplementation(() => {}) + vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + const stderrSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) + + try { + process.chdir(firstFixture.cwd) + await main(['templates', 'list']) + + process.chdir(secondFixture.cwd) + await main(['templates', 'list']) + + expect(listSpy).toHaveBeenCalledTimes(1) + expect(stderrSpy).toHaveBeenCalled() + expect(process.exitCode).toBe(1) + } finally { + firstFixture.cleanup() + secondFixture.cleanup() + } + }) + + it('does not keep injected dotenv credentials after the file is removed in the same directory', async () => { + const fixture = createCliFixture() + const emptyCredentialsFilePath = path.join(fixture.root, 'empty-credentials.env') + const dotenvPath = path.join(fixture.cwd, '.env') + writeFileSync(dotenvPath, 'TRANSLOADIT_AUTH_TOKEN=dotenv-token\n') + writeFileSync(emptyCredentialsFilePath, '') + + clearAmbientTransloaditEnv() + vi.stubEnv('TRANSLOADIT_CREDENTIALS_FILE', emptyCredentialsFilePath) + + const listSpy = vi.spyOn(Transloadit.prototype, 'listTemplates').mockResolvedValue({ + items: [], + count: 0, + }) + vi.spyOn(OutputCtl.prototype, 'print').mockImplementation(() => {}) + vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + const stderrSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) + + try { + process.chdir(fixture.cwd) + await main(['templates', 'list']) + rmSync(dotenvPath) + await main(['templates', 'list']) + + expect(listSpy).toHaveBeenCalledTimes(1) + expect(stderrSpy).toHaveBeenCalled() + expect(process.exitCode).toBe(1) + } finally { + fixture.cleanup() + } + }) + + it('merges shell credentials with the current working directory .env', async () => { + const fixture = createCliFixture() + writeFileSync(path.join(fixture.cwd, '.env'), 
'TRANSLOADIT_SECRET=dotenv-secret\n') + + clearAmbientTransloaditEnv() + vi.stubEnv('TRANSLOADIT_KEY', 'shell-key') + process.chdir(fixture.cwd) + + const stdoutSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + const stderrSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) + + try { + await runSig({ + providedInput: JSON.stringify({ auth: { expires: '2025-01-03T00:00:00.000Z' } }), + }) + + expect(stderrSpy).not.toHaveBeenCalled() + const output = JSON.parse(`${stdoutSpy.mock.calls[0]?.[0]}`.trim()) + const params = JSON.parse(output.params as string) + expect(params.auth?.key).toBe('shell-key') + expect(process.exitCode).toBeUndefined() + } finally { + fixture.cleanup() + } + }) +}) diff --git a/packages/node/test/unit/cli/test-cli.test.ts b/packages/node/test/unit/cli/test-cli.test.ts index 19d8d73c..088d05ea 100644 --- a/packages/node/test/unit/cli/test-cli.test.ts +++ b/packages/node/test/unit/cli/test-cli.test.ts @@ -1,4 +1,4 @@ -import { mkdtempSync, rmSync, symlinkSync } from 'node:fs' +import { mkdirSync, mkdtempSync, rmSync, symlinkSync, writeFileSync } from 'node:fs' import { tmpdir } from 'node:os' import path from 'node:path' import { fileURLToPath } from 'node:url' @@ -9,11 +9,44 @@ import OutputCtl from '../../../src/cli/OutputCtl.ts' import { main, shouldRunCli } from '../../../src/cli.ts' import { Transloadit } from '../../../src/Transloadit.ts' +const originalCwd = process.cwd() + const resetExitCode = () => { process.exitCode = undefined } +function createIsolatedCliFixture(): { + cleanup: () => void + credentialsFilePath: string + cwd: string +} { + const root = mkdtempSync(path.join(tmpdir(), 'transloadit-cli-credentials-')) + const cwd = path.join(root, 'workspace') + const credentialsFilePath = path.join(root, 'credentials') + + mkdirSync(cwd, { recursive: true }) + writeFileSync(credentialsFilePath, '') + + return { + cwd, + credentialsFilePath, + cleanup: () => { + rmSync(root, { recursive: true, force: true 
}) + }, + } +} + +function clearAmbientTransloaditEnv(): void { + vi.stubEnv('TRANSLOADIT_KEY', '') + vi.stubEnv('TRANSLOADIT_SECRET', '') + vi.stubEnv('TRANSLOADIT_AUTH_KEY', '') + vi.stubEnv('TRANSLOADIT_AUTH_SECRET', '') + vi.stubEnv('TRANSLOADIT_AUTH_TOKEN', '') + vi.stubEnv('TRANSLOADIT_ENDPOINT', '') +} + afterEach(() => { + process.chdir(originalCwd) vi.restoreAllMocks() vi.unstubAllEnvs() resetExitCode() @@ -115,14 +148,10 @@ describe('cli smart_sig', () => { }) it('fails when credentials are missing', async () => { - const originalKey = process.env.TRANSLOADIT_KEY - const originalSecret = process.env.TRANSLOADIT_SECRET - const originalAuthKey = process.env.TRANSLOADIT_AUTH_KEY - const originalAuthSecret = process.env.TRANSLOADIT_AUTH_SECRET - delete process.env.TRANSLOADIT_KEY - delete process.env.TRANSLOADIT_SECRET - delete process.env.TRANSLOADIT_AUTH_KEY - delete process.env.TRANSLOADIT_AUTH_SECRET + const fixture = createIsolatedCliFixture() + clearAmbientTransloaditEnv() + vi.stubEnv('TRANSLOADIT_CREDENTIALS_FILE', fixture.credentialsFilePath) + process.chdir(fixture.cwd) const stdoutSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) const stderrSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) @@ -137,19 +166,15 @@ describe('cli smart_sig', () => { }) expect(stdoutSpy).not.toHaveBeenCalled() - expect(stderrSpy).toHaveBeenCalledWith( - 'Missing credentials. Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', - ) + expect(stderrSpy).toHaveBeenCalled() + const message = `${stderrSpy.mock.calls[0]?.[0]}` + expect(message).toContain('Missing credentials.') + expect(message).toContain('1. Shell env:') + expect(message).toContain('2. Current directory .env:') + expect(message).toContain(`3. 
Credentials file: ${fixture.credentialsFilePath}`) expect(process.exitCode).toBe(1) } finally { - if (originalKey != null) process.env.TRANSLOADIT_KEY = originalKey - else delete process.env.TRANSLOADIT_KEY - if (originalSecret != null) process.env.TRANSLOADIT_SECRET = originalSecret - else delete process.env.TRANSLOADIT_SECRET - if (originalAuthKey != null) process.env.TRANSLOADIT_AUTH_KEY = originalAuthKey - else delete process.env.TRANSLOADIT_AUTH_KEY - if (originalAuthSecret != null) process.env.TRANSLOADIT_AUTH_SECRET = originalAuthSecret - else delete process.env.TRANSLOADIT_AUTH_SECRET + fixture.cleanup() } }) @@ -294,14 +319,10 @@ describe('cli sig', () => { }) it('fails when credentials are missing', async () => { - const originalKey = process.env.TRANSLOADIT_KEY - const originalSecret = process.env.TRANSLOADIT_SECRET - const originalAuthKey = process.env.TRANSLOADIT_AUTH_KEY - const originalAuthSecret = process.env.TRANSLOADIT_AUTH_SECRET - delete process.env.TRANSLOADIT_KEY - delete process.env.TRANSLOADIT_SECRET - delete process.env.TRANSLOADIT_AUTH_KEY - delete process.env.TRANSLOADIT_AUTH_SECRET + const fixture = createIsolatedCliFixture() + clearAmbientTransloaditEnv() + vi.stubEnv('TRANSLOADIT_CREDENTIALS_FILE', fixture.credentialsFilePath) + process.chdir(fixture.cwd) const stdoutSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) const stderrSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) @@ -310,19 +331,15 @@ describe('cli sig', () => { await runSig({ providedInput: '{}' }) expect(stdoutSpy).not.toHaveBeenCalled() - expect(stderrSpy).toHaveBeenCalledWith( - 'Missing credentials. Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', - ) + expect(stderrSpy).toHaveBeenCalled() + const message = `${stderrSpy.mock.calls[0]?.[0]}` + expect(message).toContain('Missing credentials.') + expect(message).toContain('1. Shell env:') + expect(message).toContain('2. 
Current directory .env:') + expect(message).toContain(`3. Credentials file: ${fixture.credentialsFilePath}`) expect(process.exitCode).toBe(1) } finally { - if (originalKey != null) process.env.TRANSLOADIT_KEY = originalKey - else delete process.env.TRANSLOADIT_KEY - if (originalSecret != null) process.env.TRANSLOADIT_SECRET = originalSecret - else delete process.env.TRANSLOADIT_SECRET - if (originalAuthKey != null) process.env.TRANSLOADIT_AUTH_KEY = originalAuthKey - else delete process.env.TRANSLOADIT_AUTH_KEY - if (originalAuthSecret != null) process.env.TRANSLOADIT_AUTH_SECRET = originalAuthSecret - else delete process.env.TRANSLOADIT_AUTH_SECRET + fixture.cleanup() } }) @@ -392,6 +409,25 @@ describe('cli help', () => { const message = stdoutSpy.mock.calls.map((call) => `${call[0]}`).join('') expect(message).toContain('Transloadit CLI') }) + + it('prints usage when --help is provided even if the current directory .env is unreadable', async () => { + const fixture = createIsolatedCliFixture() + mkdirSync(path.join(fixture.cwd, '.env')) + process.chdir(fixture.cwd) + + const stdoutSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + const stderrSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) + + try { + await main(['--help']) + + const message = stdoutSpy.mock.calls.map((call) => `${call[0]}`).join('') + expect(stderrSpy).not.toHaveBeenCalled() + expect(message).toContain('Transloadit CLI') + } finally { + fixture.cleanup() + } + }) }) describe('cli docs robots', () => { diff --git a/packages/transloadit/README.md b/packages/transloadit/README.md index b48854c1..347ffb2b 100644 --- a/packages/transloadit/README.md +++ b/packages/transloadit/README.md @@ -51,22 +51,66 @@ This package includes a full-featured CLI for interacting with Transloadit from ### Quick Start +Pick one auth setup and then run the CLI. 
+ +Use shell env vars: + ```bash -# Set your credentials export TRANSLOADIT_KEY="YOUR_TRANSLOADIT_KEY" export TRANSLOADIT_SECRET="YOUR_TRANSLOADIT_SECRET" +``` + +Or create a reusable home credentials file: + +```bash +mkdir -p ~/.transloadit +cat > ~/.transloadit/credentials <<'EOF' +TRANSLOADIT_KEY="YOUR_TRANSLOADIT_KEY" +TRANSLOADIT_SECRET="YOUR_TRANSLOADIT_SECRET" +EOF +chmod 600 ~/.transloadit/credentials +``` + +Then see all available commands: -# See all available commands +```bash npx -y @transloadit/node --help ``` The CLI binary is still called `transloadit`, so command examples below may use `npx transloadit ...`. +### Credential Resolution + +The CLI resolves authentication in this order: + +1. Shell environment variables such as `TRANSLOADIT_KEY`, `TRANSLOADIT_SECRET`, and `TRANSLOADIT_AUTH_TOKEN` +2. The current working directory `.env` +3. `~/.transloadit/credentials` + +The home credentials file uses dotenv syntax. It is meant for user-level CLI use, so Intents and +other commands work from any directory on your machine without having to export credentials first. + +Example `~/.transloadit/credentials`: + +```env +TRANSLOADIT_KEY="YOUR_TRANSLOADIT_KEY" +TRANSLOADIT_SECRET="YOUR_TRANSLOADIT_SECRET" +# Optional: +# TRANSLOADIT_ENDPOINT="https://api2.transloadit.com" +# TRANSLOADIT_AUTH_TOKEN="YOUR_BEARER_TOKEN" +``` + +If you want to use a different path, set `TRANSLOADIT_CREDENTIALS_FILE=/abs/path/to/credentials.env`. + +Most commands can authenticate with either `TRANSLOADIT_AUTH_TOKEN` or `TRANSLOADIT_KEY` + +`TRANSLOADIT_SECRET`. Commands that mint bearer tokens or generate signatures still require +`TRANSLOADIT_KEY` and `TRANSLOADIT_SECRET`. + ### Minting Bearer Tokens (Hosted MCP) If you want to connect an agent to the Transloadit-hosted MCP endpoint, mint a short-lived bearer -token via `POST /token`: +token via `POST /token`. 
This command also uses the same credential resolution order above: ```bash # Prints JSON to stdout (stderr may include npx/npm noise) @@ -93,12 +137,14 @@ The full generated intent reference also lives in [`docs/intent-commands.md`](./ #### At a glance Intent commands are the fastest path to common one-off tasks from the CLI. +Authentication is resolved in this order: shell environment, the current working directory `.env`, then `~/.transloadit/credentials`. +The home credentials file uses dotenv syntax and can include `TRANSLOADIT_KEY`, `TRANSLOADIT_SECRET`, `TRANSLOADIT_ENDPOINT`, and `TRANSLOADIT_AUTH_TOKEN`. Use `--print-urls` when you want temporary result URLs without downloading locally. All intent commands also support the global CLI flags `--json`, `--log-level`, `--endpoint`, and `--help`. | Command | What it does | Input | Output | | --- | --- | --- | --- | -| `image generate` | Generate images from text prompts | none | file | +| `image generate` | Generate images from text prompts | file, dir, URL, base64 | file | | `preview generate` | Generate a preview thumbnail | file, dir, URL, base64 | file | | `image remove-background` | Remove the background from images | file, dir, URL, base64 | file | | `image optimize` | Optimize images without quality loss | file, dir, URL, base64 | file | @@ -164,43 +210,49 @@ These flags are available across many intent commands, so the per-command sectio Generate images from text prompts -Runs `/image/generate` and writes the result to `--out`. +Runs `/image/generate`. Without inputs, this is text-to-image. With one or more `--input` files, the inputs are bundled into a single assembly so the prompt can refer to them by filename. 
**Usage** ```bash -npx transloadit image generate [options] +npx transloadit image generate [--input ] [options] ``` **Quick facts** -- Input: none +- Input: file, dir, URL, base64 - Output: file -- Execution: no input -- Backend: `/image/generate` +- Execution: single assembly +- Backend: semantic alias `image-generate` **Shared flags** -- Uses the shared output flags listed above. +- Uses the shared file input and output flags listed above. +- Also supports the shared base processing flags listed above. **Command options** | Flag | Type | Required | Example | Description | | --- | --- | --- | --- | --- | -| `--model` | `string` | no | `value` | The AI model to use for image generation. Defaults to google/nano-banana. | | `--prompt` | `string` | yes | `"A red bicycle in a studio"` | The prompt describing the desired image content. | +| `--model` | `string` | no | `google/nano-banana-2` | The AI model to use for image generation. Defaults to google/nano-banana-2. | | `--format` | `string` | no | `jpg` | Format of the generated image. | -| `--seed` | `number` | no | `1` | Seed for the random number generator. | -| `--aspect-ratio` | `string` | no | `value` | Aspect ratio of the generated image. | -| `--height` | `number` | no | `1` | Height of the generated image. | -| `--width` | `number` | no | `1` | Width of the generated image. | -| `--style` | `string` | no | `value` | Style of the generated image. | -| `--num-outputs` | `number` | no | `1` | Number of image variants to generate. | +| `--seed` | `number` | no | — | Seed for the random number generator. | +| `--aspect-ratio` | `string` | no | — | Aspect ratio of the generated image. | +| `--height` | `number` | no | — | Height of the generated image. | +| `--width` | `number` | no | — | Width of the generated image. | +| `--style` | `string` | no | — | Style of the generated image. | +| `--num-outputs` | `number` | no | — | Number of image variants to generate. 
| **Examples** ```bash +# Generate an image from text transloadit image generate --prompt "A red bicycle in a studio" --out output.png +# Guide generation with one input image +transloadit image generate --input subject.jpg --prompt "Place subject.jpg on a magazine cover" --out output.png +# Guide generation with multiple input images +transloadit image generate --input person1.jpg --input person2.jpg --input background.jpg --prompt "Place person1.jpg feeding person2.jpg in front of background.jpg" --out output.png ``` #### `preview generate` @@ -236,6 +288,7 @@ npx transloadit preview generate --input [options] | `--height` | `number` | no | `1` | Height of the thumbnail, in pixels. | | `--resize-strategy` | `string` | no | `crop` | To achieve the desired dimensions of the preview thumbnail, the Robot might have to resize the generated image. | | `--background` | `string` | no | `value` | The hexadecimal code of the color used to fill the background (only used for the pad resize strategy). | +| `--zoom` | `boolean` | no | `true` | If set to false, smaller images will not be stretched to the desired width and height. | | `--strategy` | `json` | no | `value` | Definition of the thumbnail generation process per file category. | | `--artwork-outer-color` | `string` | no | `value` | The color used in the outer parts of the artwork's gradient. | | `--artwork-center-color` | `string` | no | `value` | The color used in the center of the artwork's gradient. | @@ -291,7 +344,7 @@ npx transloadit image remove-background --input [options] | Flag | Type | Required | Example | Description | | --- | --- | --- | --- | --- | | `--select` | `string` | no | `foreground` | Region to select and keep in the image. The other region is removed. | -| `--format` | `string` | no | `png` | Format of the generated image. | +| `--format` | `string` | no | `png` | Format of the generated image. Defaults to PNG when not provided. 
| | `--provider` | `string` | no | `aws` | Provider to use for removing the background. | | `--model` | `string` | no | `value` | Provider-specific model to use for removing the background. Mostly intended for testing and evaluation. | @@ -333,6 +386,7 @@ npx transloadit image optimize --input [options] | `--progressive` | `boolean` | no | `true` | Interlaces the image if set to true, which makes the result image load progressively in browsers. | | `--preserve-meta-data` | `boolean` | no | `true` | Specifies if the image's metadata should be preserved during the optimization, or not. | | `--fix-breaking-images` | `boolean` | no | `true` | If set to true this parameter tries to fix images that would otherwise make the underlying tool error out and thereby break your Assemblies . | +| `--lossy` | `boolean` | no | `true` | When set to false (the default), only lossless PNG optimizers are used, disabling pngquant to preserve color accuracy. | **Examples** @@ -410,6 +464,7 @@ npx transloadit image resize --input [options] | `--trim-whitespace` | `boolean` | no | `true` | This determines if additional whitespace around the image should first be trimmed away. | | `--clip` | `auto` | no | `value` | Apply the clipping path to other operations in the resize job, if one is present. | | `--negate` | `boolean` | no | `true` | Replace each pixel with its complementary color, effectively negating the image. Especially useful when testing clipping. | +| `--clut` | `boolean` | no | `true` | Applies a Color Look-Up Table (CLUT) image to remap the colors of the input image using ImageMagick's -clut operator. | | `--density` | `string` | no | `value` | While in-memory quality and file format depth specifies the color resolution, the density of an image is the spatial (space) resolution of the image. | | `--monochrome` | `boolean` | no | `true` | Transform the image to black and white. This is a shortcut for setting the colorspace to Gray and type to Bilevel. 
| | `--shave` | `auto` | no | `value` | Shave pixels from the image edges. The value should be in the format width or widthxheight to specify the number of pixels to remove from each side. | @@ -565,6 +620,7 @@ npx transloadit document thumbs --input [options] | Flag | Type | Required | Example | Description | | --- | --- | --- | --- | --- | | `--page` | `number` | no | `1` | The PDF page that you want to convert to an image. By default the value is null which means that all pages will be converted into images. | +| `--page-range` | `string` | no | `value` | A page range to extract, in the format "start-end" (e.g., "1-20"). | | `--format` | `string` | no | `jpg` | The format of the extracted image(s). If you specify the value "gif", then an animated gif cycling through all pages is created. Please check out this demo to learn more about… | | `--delay` | `number` | no | `1` | If your output format is "gif" then this parameter sets the number of 100th seconds to pass before the next frame is shown in the animation. | | `--width` | `number` | no | `1` | Width of the new image, in pixels. If not specified, will default to the width of the input image | @@ -577,7 +633,7 @@ npx transloadit document thumbs --input [options] | `--colorspace` | `string` | no | `CMY` | Sets the image colorspace. For details about the available values, see the ImageMagick documentation. Please note that if you were using "RGB", we recommend using "sRGB".… | | `--trim-whitespace` | `boolean` | no | `true` | This determines if additional whitespace around the PDF should first be trimmed away before it is converted to an image. | | `--pdf-use-cropbox` | `boolean` | no | `true` | Some PDF documents lie about their dimensions. For instance they'll say they are landscape, but when opened in decent Desktop readers, it's really in portrait mode. This can… | -| `--turbo` | `boolean` | no | `true` | If you set this to false, the robot will not emit files as they become available. 
| +| `--turbo` | `boolean` | no | `true` | Enables high-performance mode for faster document processing. Defaults to true; set it to false if you do not want the robot to emit files as they become available. | **Examples** @@ -614,14 +670,14 @@ npx transloadit audio waveform --input [options] | Flag | Type | Required | Example | Description | | --- | --- | --- | --- | --- | | `--ffmpeg` | `json` | no | `value` | A parameter object to be passed to FFmpeg. If a preset is used, the options specified are merged on top of the ones from the preset. For available options, see the FFmpeg… | -| `--format` | `string` | no | `image` | The format of the result file. Can be "image" or "json". If "image" is supplied, a PNG image will be created, otherwise a JSON file. | +| `--format` | `string` | no | `image` | The format of the result file. Can be "image" or "json". If "image" is supplied, a PNG image will be created, otherwise a JSON file. When style is "spectrogram", only "image" is… | | `--width` | `number` | no | `1` | The width of the resulting image if the format "image" was selected. | | `--height` | `number` | no | `1` | The height of the resulting image if the format "image" was selected. | | `--antialiasing` | `auto` | no | `0` | Either a value of 0 or 1, or true/false, corresponding to if you want to enable antialiasing to achieve smoother edges in the waveform graph or not. | | `--background-color` | `string` | no | `value` | The background color of the resulting image in the "rrggbbaa" format (red, green, blue, alpha), if the format "image" was selected. | | `--center-color` | `string` | no | `value` | The color used in the center of the gradient. The format is "rrggbbaa" (red, green, blue, alpha). | | `--outer-color` | `string` | no | `value` | The color used in the outer parts of the gradient. The format is "rrggbbaa" (red, green, blue, alpha). | -| `--style` | `string` | no | `v0` | Waveform style version. - "v0": Legacy waveform generation (default). - "v1": Advanced waveform generation with additional parameters. 
For backwards compatibility, numeric values… | +| `--style` | `string` | no | `v0` | Waveform style version. - "v0": Legacy waveform generation (default). - "v1": Advanced waveform generation with additional parameters. - "spectrogram": Spectrogram visualization… | | `--split-channels` | `boolean` | no | `true` | Available when style is "v1". If set to true, outputs multi-channel waveform data or image files, one per channel. | | `--zoom` | `number` | no | `1` | Available when style is "v1". Zoom level in samples per pixel. This parameter cannot be used together with pixels_per_second. | | `--pixels-per-second` | `number` | no | `1` | Available when style is "v1". Zoom level in pixels per second. This parameter cannot be used together with zoom. | @@ -639,6 +695,13 @@ npx transloadit audio waveform --input [options] | `--with-axis-labels` | `boolean` | no | `true` | Available when style is "v1". If set to true, renders waveform image with axis labels. | | `--amplitude-scale` | `number` | no | `1` | Available when style is "v1". Amplitude scale factor. | | `--compression` | `number` | no | `1` | Available when style is "v1". PNG compression level: 0 (none) to 9 (best), or -1 (default). Only applicable when format is "image". | +| `--color-map` | `string` | no | `viridis` | Available when style is "spectrogram". Color scheme for the spectrogram visualization. Defaults to "viridis". | +| `--frequency-scale` | `string` | no | `linear` | Available when style is "spectrogram". Frequency scale for the spectrogram. "linear" shows frequencies evenly spaced, "logarithmic" emphasizes lower frequencies. Defaults to… | +| `--frequency-min` | `number` | no | `1` | Available when style is "spectrogram". Minimum frequency in Hz to display. Defaults to 0. | +| `--frequency-max` | `number` | no | `1` | Available when style is "spectrogram". Maximum frequency in Hz to display. Defaults to half the sample rate (Nyquist frequency). 
| +| `--legend` | `boolean` | no | `true` | Available when style is "spectrogram". Whether to include a legend showing the frequency and time scales. Defaults to false. | +| `--gain` | `number` | no | `1` | Available when style is "spectrogram". Linear gain factor for spectrogram intensity. Defaults to 1. | +| `--orientation` | `string` | no | `vertical` | Available when style is "spectrogram". Orientation of the spectrogram. "horizontal" shows time on the x-axis (default), "vertical" shows time on the y-axis. | **Examples** @@ -655,7 +718,7 @@ Runs `/text/speak` on each input file and writes the result to `--out`. **Usage** ```bash -npx transloadit text speak --input [options] +npx transloadit text speak [--input ] [options] ``` **Quick facts** @@ -918,6 +981,7 @@ npx transloadit file compress --input [options] | `--compression-level` | `number` | no | `1` | Determines how fiercely to try to compress the archive. -0 is compressionless, which is suitable for media that is already compressed. -1 is fastest with lowest compression. -9… | | `--file-layout` | `string` | no | `advanced` | Determines if the result archive should contain all files in one directory (value for this is "simple") or in subfolders according to the explanation below (value for this is… | | `--archive-name` | `string` | no | `value` | The name of the archive file to be created (without the file extension). | +| `--path` | `string` | no | `value` | The path at which each file is to be placed inside the archive. | **Examples** @@ -949,6 +1013,13 @@ npx transloadit file decompress --input [options] - Uses the shared file input and output flags listed above. - Also supports the shared base processing flags, watch flags, bundling flags listed above. +**Command options** + +| Flag | Type | Required | Example | Description | +| --- | --- | --- | --- | --- | +| `--password` | `string` | no | `value` | The password to use for decrypting password-protected archives. 
| +| `--turbo` | `boolean` | no | `true` | Enables Turbo Mode for /file/decompress. This setting defaults to true. Set it to false to disable Turbo Mode. When enabled, extracted files are emitted as soon as they are… | + **Examples** ```bash @@ -1735,3 +1806,7 @@ See [CONTRIBUTING](./CONTRIBUTING.md). + + + +