Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
34 commits
Select commit Hold shift + click to select a range
2df236e
(wip) Started refactor, added CLI, started moving validators to their…
royanger Mar 2, 2024
ee654d0
(wip) Added Supabase validator
royanger Mar 2, 2024
58fe501
(wip) Check if the file exists
royanger Mar 2, 2024
bc86a10
(wip) Reading from .json, and from .csv into JSON
royanger Mar 2, 2024
335b642
(wip) Moved validator list to constant, generate CLI options on deman…
royanger Mar 2, 2024
5a28fc6
(wip) Moved validators into their own directory, added metadata to ea…
royanger Mar 2, 2024
a43625b
(wip) Minor improvements, some more typing
royanger Mar 2, 2024
bdad2f6
(wip) Refactored to transform incoming data to expected schema
royanger Mar 3, 2024
7a02123
(wip) Basic import is now working
royanger Mar 3, 2024
8367ee7
(wip) Cleanup and adding spinners + messaging
royanger Mar 3, 2024
398e9ec
(wip) Added logger, some cleanup
royanger Mar 3, 2024
3f400b2
(wip) Improved logger, creates directories as needed
royanger Mar 3, 2024
f2d6e6a
(wip) Improved logger significantly, removed blank/empty entries from…
royanger Mar 3, 2024
e959bc0
(wip) Added eslint
royanger Mar 3, 2024
34f3768
(wip)
royanger Mar 3, 2024
28b852e
(wip) Added prettier
royanger Mar 4, 2024
969c4a1
(wip) Removed spinner that was added just for testing.
royanger Mar 4, 2024
63fa0fd
(wip) Added Supabase JSON sample
royanger Mar 4, 2024
6d5f6a4
(wip) Added transform/validation to JSON and CSV
royanger Mar 4, 2024
242d0e7
Removed need for different handler files, combined into one.
royanger Mar 4, 2024
4af9dab
Formatting
royanger Mar 4, 2024
c46c5e3
Bug fixes and minor updates
royanger Mar 4, 2024
cb11d76
Fixed types with any on transform functions and used a newer .hasOwnP…
JacobMGEvans Mar 4, 2024
dea3101
Updated Supabase handler, added sample, add code to add default field
royanger Mar 4, 2024
2a22ff4
Merge pull request #12 from clerk/jacob/cut-from-roy-refactor
royanger Mar 4, 2024
10fb7d9
improved Logger type and fixed updated type handling the undefined wi…
JacobMGEvans Mar 4, 2024
1852a79
Apply suggestions from code review
royanger Mar 4, 2024
08ab75c
Type passed loadUsersFromFile needs validation, handling with cast fo…
JacobMGEvans Mar 4, 2024
64b096e
Updated Auth0 map
royanger Mar 4, 2024
802f34c
handle merge conflict
JacobMGEvans Mar 4, 2024
4829cda
Merge pull request #13 from clerk/jacob/testing-refactor
JacobMGEvans Mar 5, 2024
b10b1cc
Added more tests for file types for loadUsers & added errorLogger test
JacobMGEvans Mar 6, 2024
31b7cce
Merge pull request #14 from clerk/jacob/testing
JacobMGEvans Mar 6, 2024
bb0a927
fix pathing for windows
jescalan Jul 26, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 25 additions & 0 deletions .eslintrc.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
module.exports = {
env: {
browser: true,
es2021: true,
},
extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
overrides: [
{
env: {
node: true,
},
files: [".eslintrc.{js,cjs}"],
parserOptions: {
sourceType: "script",
},
},
],
parser: "@typescript-eslint/parser",
parserOptions: {
ecmaVersion: "latest",
sourceType: "module",
},
plugins: ["@typescript-eslint"],
rules: {},
};
4 changes: 2 additions & 2 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
node_modules
.env
users.json
migration-log.json
bun.lockb
users.csv
package-lock.json
yarn.lock
pnpm-lock.yaml
logs
6 changes: 6 additions & 0 deletions .prettierignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
/logs/**
/samples/**
**/*.json
**/*.csv


12 changes: 12 additions & 0 deletions .prettierrc.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
module.exports = {
prettier: {
trailingComma: "es5",
tabWidth: 2,
semi: false,
singleQuote: true,
printWidth: 80,
semi: true,
bracketSpacing: true,
arrowParans: "always",
},
};
366 changes: 183 additions & 183 deletions LICENSE.Apache-2.0.md

Large diffs are not rendered by default.

48 changes: 25 additions & 23 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,20 +17,21 @@ npm install
```

### Users.json file
Create a `users.json` file. This file should be populated with all the users that need to be imported. The users should pass this schema:

Create a `users.json` file. This file should be populated with all the users that need to be imported. The users should pass this schema:

```ts
[
{
"userId": "string",
"email": "email",
"firstName": "string (optional)",
"lastName": "string (optional)",
"password": "string (optional)",
"passwordHasher": "argon2 | argon | bcrypt | md5 | pbkdf2_sha256 | pbkdf2_sha256_django | pbkdf2_sha1 | scrypt_firebase",
}
]
userId: "string",
email: "email",
firstName: "string (optional)",
lastName: "string (optional)",
password: "string (optional)",
passwordHasher:
"argon2 | argon | bcrypt | md5 | pbkdf2_sha256 | pbkdf2_sha256_django | pbkdf2_sha1 | scrypt_firebase",
},
];
```

The only required fields are `userId` and `email`. First and last names can be added if available. Clerk will also accept hashed password values along with the hashing algorithm used (the default is `bcrypt`).
Expand All @@ -49,12 +50,12 @@ Here are a couple examples.
"userId": "2",
"email": "john@blurp.com",
"password": "$2a$10$N9qo8uLOickgx2ZMRZoMyeIjZAgcfl7p92ldGxad68LJZdL17lhWy",
"passwordHasher": "bcrypt" // default value
"passwordHasher": "bcrypt" // default value
}
]
```

The samples/ folder contains some samples, including issues that will produce errors when running the import.
The samples/ folder contains some samples, including issues that will produce errors when running the import.

### Secret Key

Expand All @@ -78,12 +79,12 @@ The script can be run on the same data multiple times, Clerk automatically uses

The script can be configured through the following environment variables:

| Variable | Description | Default |
| -------- | ----------- | ------- |
| `CLERK_SECRET_KEY` | Your Clerk secret key | `undefined` |
| `DELAY_MS` | Delay between requests to respect rate limits | `1000` |
| `RETRY_DELAY_MS` | Delay when the rate limit is hit | `10000` |
| `OFFSET` | Offset to start migration (number of users to skip) | `0` |
| Variable | Description | Default |
| ------------------ | --------------------------------------------------- | ----------- |
| `CLERK_SECRET_KEY` | Your Clerk secret key | `undefined` |
| `DELAY_MS` | Delay between requests to respect rate limits | `1000` |
| `RETRY_DELAY_MS` | Delay when the rate limit is hit | `10000` |
| `OFFSET` | Offset to start migration (number of users to skip) | `0` |

## Handling the Foreign Key constraint

Expand All @@ -93,21 +94,23 @@ If you were using a database, you will have data tied to your previous auth syst

Our sessions allow for conditional expressions. This would allow you to add a session claim that will return either the `externalId` (the previous id for your user) when it exists, or the `userId` from Clerk. This will result in your imported users returning their `externalId` while newer users will return the Clerk `userId`.

In your Dashboard, go to Sessions -> Edit. Add the following:
In your Dashboard, go to Sessions -> Edit. Add the following:

```json
{
"userId": "{{user.external_id || user.id}}"
"userId": "{{user.external_id || user.id}}"
}
```

You can now access this value using the following:
```ts

```ts
const { sessionClaims } = auth();
console.log(sessionClaims.userId)
console.log(sessionClaims.userId);
```

You can add the following for typescript:
You can add the following for typescript:

```js
// types/global.d.ts

Expand All @@ -125,4 +128,3 @@ declare global {
You could continue to generate unique ids for the database as done previously, and then store those in `externalId`. This way all users would have an `externalId` that would be used for DB interactions.

You could add a column in your user table inside of your database called `ClerkId`. Use that column to store the userId from Clerk directly into your database.

151 changes: 17 additions & 134 deletions index.ts
Original file line number Diff line number Diff line change
@@ -1,148 +1,31 @@
import { config } from "dotenv";
config();

import * as fs from "fs";
import * as z from "zod";
import clerkClient from "@clerk/clerk-sdk-node";
import ora, { Ora } from "ora";

const SECRET_KEY = process.env.CLERK_SECRET_KEY;
const DELAY = parseInt(process.env.DELAY_MS ?? `1_000`);
const RETRY_DELAY = parseInt(process.env.RETRY_DELAY_MS ?? `10_000`);
const IMPORT_TO_DEV = process.env.IMPORT_TO_DEV_INSTANCE ?? "false";
const OFFSET = parseInt(process.env.OFFSET ?? `0`);

if (!SECRET_KEY) {
import { env } from "./src/envs-constants";
import { runCLI } from "./src/cli";
import { loadUsersFromFile } from "./src/functions";
import { importUsers } from "./src/import-users";

if (
env.CLERK_SECRET_KEY.split("_")[1] !== "live" &&
env.IMPORT_TO_DEV === false
) {
throw new Error(
"CLERK_SECRET_KEY is required. Please copy .env.example to .env and add your key."
"The Clerk Secret Key provided is for a development instance. Development instances are limited to 500 users and do not share their userbase with production instances. If you want to import users to your development instance, please set 'IMPORT_TO_DEV' in your .env to 'true'.",
);
}

if (SECRET_KEY.split("_")[1] !== "live" && IMPORT_TO_DEV === "false") {
throw new Error(
"The Clerk Secret Key provided is for a development instance. Development instances are limited to 500 users and do not share their userbase with production instances. If you want to import users to your development instance, please set 'IMPORT_TO_DEV_INSTANCE' in your .env to 'true'."
);
}

const userSchema = z.object({
userId: z.string(),
email: z.string().email(),
firstName: z.string().optional(),
lastName: z.string().optional(),
password: z.string().optional(),
passwordHasher: z
.enum([
"argon2i",
"argon2id",
"bcrypt",
"md5",
"pbkdf2_sha256",
"pbkdf2_sha256_django",
"pbkdf2_sha1",
"scrypt_firebase",
])
.optional(),
});

type User = z.infer<typeof userSchema>;

const createUser = (userData: User) =>
userData.password
? clerkClient.users.createUser({
externalId: userData.userId,
emailAddress: [userData.email],
firstName: userData.firstName,
lastName: userData.lastName,
passwordDigest: userData.password,
passwordHasher: userData.passwordHasher,
})
: clerkClient.users.createUser({
externalId: userData.userId,
emailAddress: [userData.email],
firstName: userData.firstName,
lastName: userData.lastName,
skipPasswordRequirement: true,
});

const now = new Date().toISOString().split(".")[0]; // YYYY-MM-DDTHH:mm:ss
function appendLog(payload: any) {
fs.appendFileSync(
`./migration-log-${now}.json`,
`\n${JSON.stringify(payload, null, 2)}`
);
}

let migrated = 0;
let alreadyExists = 0;

async function processUserToClerk(userData: User, spinner: Ora) {
const txt = spinner.text;
try {
const parsedUserData = userSchema.safeParse(userData);
if (!parsedUserData.success) {
throw parsedUserData.error;
}
await createUser(parsedUserData.data);

migrated++;
} catch (error) {
if (error.status === 422) {
appendLog({ userId: userData.userId, ...error });
alreadyExists++;
return;
}

// Keep cooldown in case rate limit is reached as a fallback if the thread blocking fails
if (error.status === 429) {
spinner.text = `${txt} - rate limit reached, waiting for ${RETRY_DELAY} ms`;
await rateLimitCooldown();
spinner.text = txt;
return processUserToClerk(userData, spinner);
}

appendLog({ userId: userData.userId, ...error });
}
}

async function cooldown() {
await new Promise((r) => setTimeout(r, DELAY));
}

async function rateLimitCooldown() {
await new Promise((r) => setTimeout(r, RETRY_DELAY));
}

async function main() {
console.log(`Clerk User Migration Utility`);

const inputFileName = process.argv[2] ?? "users.json";
const args = await runCLI();

console.log(`Fetching users from ${inputFileName}`);
// we can use Zod to validate the args.keys to ensure it is TransformKeys type
const users = await loadUsersFromFile(args.file, args.key);

const parsedUserData: any[] = JSON.parse(
fs.readFileSync(inputFileName, "utf-8")
const usersToImport = users.slice(
parseInt(args.offset) > env.OFFSET ? parseInt(args.offset) : env.OFFSET,
);
const offsetUsers = parsedUserData.slice(OFFSET);
console.log(
`users.json found and parsed, attempting migration with an offset of ${OFFSET}`
);

let i = 0;
const spinner = ora(`Migrating users`).start();

for (const userData of offsetUsers) {
spinner.text = `Migrating user ${i}/${offsetUsers.length}, cooldown`;
await cooldown();
i++;
spinner.text = `Migrating user ${i}/${offsetUsers.length}`;
await processUserToClerk(userData, spinner);
}

spinner.succeed(`Migration complete`);
return;
importUsers(usersToImport);
}

main().then(() => {
console.log(`${migrated} users migrated`);
console.log(`${alreadyExists} users failed to upload`);
});
main();
Loading