diff --git a/.dockerignore b/.dockerignore
index 2cb7fe042..e5cedb1b8 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,11 +1,48 @@
.editorconfig
+.env
.env.example
+.env.staging
+.git
.github
+.nx
.vscode
.workspace.code-workspace
app
-# dist
+apps/landing/.next
+dist
electron-scripts
logs
node_modules
npm-debug.log
+test-results
+tmp
+yarn-error.log
+
+# System Files
+.DS_Store
+Thumbs.db
+
+# Generated Docusaurus files
+.docusaurus/
+.cache-loader/
+
+# Next.js
+.next
+
+# Playwright
+**/test-results
+**/playwright-report
+**/playwright/.cache
+.nx
+
+# Not required for core jetstream
+apps/cron-tasks
+apps/docs
+apps/electron
+apps/jetstream-e2e
+electron-scripts
+*#
+*~
+.DS_Store
+Thumbs.db
+
diff --git a/.env.example b/.env.example
index 76dd5ec74..85a470ffe 100644
--- a/.env.example
+++ b/.env.example
@@ -22,7 +22,7 @@ SFDC_CONSUMER_KEY='3MVG9tSqyyAXNH5ItQtuplEg40Ks_MLSG37L1PV.TLDjsCbdp7EDonFUW0csS
SFDC_CONSUMER_SECRET='F77C1B4AF03CF51B290A591766F4C430E3136949A636D4AA5339F8EB6A40052A'
# API VERSION TO USE
-NX_SFDC_API_VERSION='55.0'
+SFDC_API_VERSION='58.0'
# If set to true, then authentication will be bypassed
# You will use a test account instead of a real account - only works if running locally
@@ -45,8 +45,6 @@ PRISMA_DEBUG='false'
NX_SFDC_CLIENT_ID_ELECTRON=''
NX_AUTH_AUDIENCE='http://getjetstream.app/app_metadata'
-
-NX_AG_GRID_KEY=''
NX_ROLLBAR_KEY=''
NX_AMPLITUDE_KEY=''
@@ -72,8 +70,8 @@ GOOGLE_ENC_KEY=''
ROLLBAR_SERVER_TOKEN=''
# Algolia API key - used to index docs pages
-APPLICATION_ID=''
-API_KEY=''
+ALGOLIA_APPLICATION_ID=''
+ALGOLIA_API_KEY=''
# HONEYCOMBE - server analytics, only set to true on hosted server
HONEYCOMB_ENABLED=false
diff --git a/.eslintrc.json b/.eslintrc.json
index 1b571356d..3df01e53b 100644
--- a/.eslintrc.json
+++ b/.eslintrc.json
@@ -41,13 +41,17 @@
{
"files": ["*.tsx"],
"rules": {
- "@typescript-eslint/no-unused-vars": "off"
+ "@typescript-eslint/no-explicit-any": "warn",
+ "@typescript-eslint/no-unused-vars": "warn"
}
},
{
"files": ["*.ts", "*.tsx"],
"extends": ["plugin:@nrwl/nx/typescript"],
- "rules": {}
+ "rules": {
+ "@typescript-eslint/no-explicit-any": "warn",
+ "@typescript-eslint/no-unused-vars": "warn"
+ }
},
{
"files": ["*.js", "*.jsx"],
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 69784e042..0548c3682 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -20,7 +20,7 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
name: Checkout [master]
with:
fetch-depth: 0
@@ -30,21 +30,23 @@ jobs:
run: git branch --track main origin/main
- name: Derive appropriate SHAs for base and head for `nx affected` commands
- uses: nrwl/nx-set-shas@v3
+ uses: nrwl/nx-set-shas@v4
- - uses: actions/setup-node@v3
+ - uses: actions/setup-node@v4
with:
- node-version: '18'
+ node-version: '20'
cache: 'yarn'
- name: install dependencies
- run: yarn install --frozen-lockfile --prefer-offline
+ run: yarn install --frozen-lockfile
- name: Build application
+ env:
+ NODE_OPTIONS: '--max_old_space_size=4096'
run: npx nx run-many --target=build --parallel=3 --projects=jetstream,api,download-zip-sw,landing --configuration=production
- name: Uploading artifacts
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: dist-artifacts
path: dist
@@ -78,7 +80,7 @@ jobs:
SFDC_CONSUMER_KEY: ${{ secrets.SFDC_CONSUMER_KEY }}
SFDC_CONSUMER_SECRET: ${{ secrets.SFDC_CONSUMER_SECRET }}
SFDC_ENC_KEY: ${{ secrets.SFDC_ENC_KEY }}
- SFDC_API_VERSION: '57.0'
+ SFDC_API_VERSION: '58.0'
services:
postgres:
@@ -96,21 +98,21 @@ jobs:
- 5432:5432
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
name: Checkout [master]
with:
fetch-depth: 0
- - uses: actions/setup-node@v3
+ - uses: actions/setup-node@v4
with:
- node-version: '18'
+ node-version: '20'
cache: 'yarn'
- name: install dependencies
- run: yarn install --frozen-lockfile --prefer-offline
+ run: yarn install --frozen-lockfile
- name: Download artifacts from build
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4
with:
name: dist-artifacts
path: dist
@@ -139,7 +141,7 @@ jobs:
- name: Upload test results
if: always() # This ensures step will always run even if prior steps fail
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: playwright-report
path: |
diff --git a/.github/workflows/deployment-monitor.yml b/.github/workflows/deployment-monitor.yml
new file mode 100644
index 000000000..ee72fce62
--- /dev/null
+++ b/.github/workflows/deployment-monitor.yml
@@ -0,0 +1,21 @@
+name: Deployment
+on:
+ push:
+ branches:
+ - release
+
+jobs:
+ deploy:
+ name: Wait for Deploy
+ runs-on: ubuntu-latest
+ steps:
+ # https://github.com/marketplace/actions/render-github-action
+ - name: Wait for Render Deployment
+ uses: bounceapp/render-action@0.6.0
+ with:
+ render-token: ${{ secrets.RENDER_TOKEN }}
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ service-id: srv-cm55msocmk4c73cnddi0
+ # retries: 20
+ # wait: 16000
+ # sleep: 30000
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index c963ae67c..e4c610901 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -23,7 +23,7 @@ jobs:
- name: Init npm cache
uses: actions/setup-node@v3
with:
- node-version: '18'
+ node-version: '20'
cache: 'yarn'
- name: install dependencies
run: yarn install --frozen-lockfile
diff --git a/.gitignore b/.gitignore
index 04ac4f4d3..e340afc3c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -56,3 +56,5 @@ package-lock.json
**/test-results
**/playwright-report
**/playwright/.cache
+
+.nx/cache
\ No newline at end of file
diff --git a/.nvmrc b/.nvmrc
new file mode 100644
index 000000000..209e3ef4b
--- /dev/null
+++ b/.nvmrc
@@ -0,0 +1 @@
+20
diff --git a/.prettierignore b/.prettierignore
index 8237da597..80a9cf177 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -3,3 +3,5 @@
/dist
/coverage
.docusaurus/
+
+/.nx/cache
\ No newline at end of file
diff --git a/.vscode/extensions.json b/.vscode/extensions.json
index 5677f0393..b736e95b4 100644
--- a/.vscode/extensions.json
+++ b/.vscode/extensions.json
@@ -8,6 +8,7 @@
"streetsidesoftware.code-spell-checker",
"eamodio.gitlens",
"ms-playwright.playwright",
- "wayou.vscode-todo-highlight"
+ "wayou.vscode-todo-highlight",
+ "firsttris.vscode-jest-runner"
]
}
diff --git a/.vscode/settings.json b/.vscode/settings.json
index f09b82ce0..7aae96ff5 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -3,7 +3,7 @@
"editor.tabSize": 2,
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
- "source.organizeImports": true
+ "source.organizeImports": "explicit"
},
"files.exclude": {
"**/.git": true,
diff --git a/Dockerfile b/Dockerfile
index c049bb1ab..25a88e40e 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,41 +1,57 @@
-# docker build -f Dockerfile . -t jetstream
-# docker-compose up
+# syntax = docker/dockerfile:1
-# Login and run DB migrations (TODO: figure out how to automate this)
-# https://medium.com/@sumankpaul/run-db-migration-script-in-docker-compose-ce8e447a77ba
-# docker ps
-# docker exec -it 791 bash
-# npx prisma migrate deploy
+ARG NODE_VERSION=20.10.0
+ARG ENVIRONMENT=production
-# TODO: auth redirect flow is broken, need to fix it
+FROM node:${NODE_VERSION}-slim as base
-FROM node:16
+# App lives here
+WORKDIR /app
-WORKDIR /usr/src/app
+# Set production environment
+ENV NODE_ENV=production
+ARG YARN_VERSION=1.22.21
+RUN npm install -g yarn@$YARN_VERSION --force
-# Copy application
-COPY ./dist/apps/api ./dist/apps/api/
-COPY ./dist/apps/jetstream ./dist/apps/jetstream/
-COPY ./dist/apps/download-zip-sw ./dist/apps/download-zip-sw/
-COPY ./dist/apps/landing ./dist/apps/landing/
+# Throw-away build stage to reduce size of final image
+FROM base as build
-# Copy supporting files
-COPY ./dist/apps/api/package.json .
-COPY ./yarn.lock .
-COPY ./.env .
-COPY ./ecosystem.config.js .
-COPY ./prisma ./prisma/
+# Install packages needed to build node modules
+RUN apt-get update -qq && \
+ apt-get install --no-install-recommends -y build-essential node-gyp openssl pkg-config python-is-python3
-# Install core dependencies
-RUN yarn
+# Install node modules
+COPY --link package.json yarn.lock ./
+RUN yarn install --frozen-lockfile --production=false
-# Install other dependencies that were not calculated by nx, but are required
-RUN yarn add dotenv prisma@^3.13.0
+# Generate Prisma Client
+COPY --link prisma .
+RUN yarn run db:generate
-# Generate prisma client - ensure that there are no OS differences
-RUN npx prisma generate
+# Copy application code
+COPY --link . .
-EXPOSE 3333
-EXPOSE 9229
+# Build application
+RUN yarn build:core
+RUN yarn build:landing
+
+# Remove development dependencies
+RUN yarn install --production=true
+
+
+# Final stage for app image
+FROM base
-CMD [ "node", "--inspect=0.0.0.0", "dist/apps/api/main.js" ]
+# Install packages needed for deployment
+RUN apt-get update -qq && \
+ apt-get install --no-install-recommends -y openssl && \
+ rm -rf /var/lib/apt/lists /var/cache/apt/archives
+
+RUN npm install -g ts-node@10.9.1
+
+# Copy built application
+COPY --from=build /app /app
+
+# Start the server by default, this can be overwritten at runtime
+EXPOSE 3333
+CMD [ "yarn", "run", "start:prod" ]
diff --git a/Dockerfile.db-migration b/Dockerfile.db-migration
deleted file mode 100644
index 154490ace..000000000
--- a/Dockerfile.db-migration
+++ /dev/null
@@ -1,11 +0,0 @@
-# Runs database migrations
-FROM node:16
-
-WORKDIR /usr/src/app
-
-COPY ./prisma ./prisma/
-
-RUN yarn add prisma
-
-# Generate prisma client - ensure that there are no OS differences
-CMD [ "npx", "prisma", "migrate", "deploy" ]
diff --git a/Dockerfile.e2e b/Dockerfile.e2e
index 581cfc1ef..8e3b1eaf3 100644
--- a/Dockerfile.e2e
+++ b/Dockerfile.e2e
@@ -1,4 +1,4 @@
-FROM mcr.microsoft.com/playwright:v1.27.0-focal
+FROM mcr.microsoft.com/playwright:v1.36.0
WORKDIR /usr/src/app
diff --git a/LICENSE.md b/LICENSE.md
index 934ab1d9f..e84f1499d 100644
--- a/LICENSE.md
+++ b/LICENSE.md
@@ -1,164 +1,11 @@
-GNU LESSER GENERAL PUBLIC LICENSE
-Version 3, 29 June 2007
+“Commons Clause” License Condition v1.0
-Copyright (C) 2007 Free Software Foundation, Inc.
-Everyone is permitted to copy and distribute verbatim copies
-of this license document, but changing it is not allowed.
+The Software is provided to you by the Licensor under the License, as defined below, subject to the following condition.
-This version of the GNU Lesser General Public License incorporates
-the terms and conditions of version 3 of the GNU General Public
-License, supplemented by the additional permissions listed below.
+Without limiting other conditions in the License, the grant of rights under the License will not include, and the License does not grant to you, right to Sell the Software.
-0. Additional Definitions.
+For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you under the License to provide to third parties, for a fee or other consideration (including without limitation fees for hosting or consulting/ support services related to the Software), a product or service whose value derives, entirely or substantially, from the functionality of the Software. Any license notice or attribution required by the License must also include this Commons Clause License Condition notice.
-As used herein, "this License" refers to version 3 of the GNU Lesser
-General Public License, and the "GNU GPL" refers to version 3 of the GNU
-General Public License.
-
-"The Library" refers to a covered work governed by this License,
-other than an Application or a Combined Work as defined below.
-
-An "Application" is any work that makes use of an interface provided
-by the Library, but which is not otherwise based on the Library.
-Defining a subclass of a class defined by the Library is deemed a mode
-of using an interface provided by the Library.
-
-A "Combined Work" is a work produced by combining or linking an
-Application with the Library. The particular version of the Library
-with which the Combined Work was made is also called the "Linked
-Version".
-
-The "Minimal Corresponding Source" for a Combined Work means the
-Corresponding Source for the Combined Work, excluding any source code
-for portions of the Combined Work that, considered in isolation, are
-based on the Application, and not on the Linked Version.
-
-The "Corresponding Application Code" for a Combined Work means the
-object code and/or source code for the Application, including any data
-and utility programs needed for reproducing the Combined Work from the
-Application, but excluding the System Libraries of the Combined Work.
-
-1. Exception to Section 3 of the GNU GPL.
-
-You may convey a covered work under sections 3 and 4 of this License
-without being bound by section 3 of the GNU GPL.
-
-2. Conveying Modified Versions.
-
-If you modify a copy of the Library, and, in your modifications, a
-facility refers to a function or data to be supplied by an Application
-that uses the facility (other than as an argument passed when the
-facility is invoked), then you may convey a copy of the modified
-version:
-
-a) under this License, provided that you make a good faith effort to
-ensure that, in the event an Application does not supply the
-function or data, the facility still operates, and performs
-whatever part of its purpose remains meaningful, or
-
-b) under the GNU GPL, with none of the additional permissions of
-this License applicable to that copy.
-
-3. Object Code Incorporating Material from Library Header Files.
-
-The object code form of an Application may incorporate material from
-a header file that is part of the Library. You may convey such object
-code under terms of your choice, provided that, if the incorporated
-material is not limited to numerical parameters, data structure
-layouts and accessors, or small macros, inline functions and templates
-(ten or fewer lines in length), you do both of the following:
-
-a) Give prominent notice with each copy of the object code that the
-Library is used in it and that the Library and its use are
-covered by this License.
-
-b) Accompany the object code with a copy of the GNU GPL and this license
-document.
-
-4. Combined Works.
-
-You may convey a Combined Work under terms of your choice that,
-taken together, effectively do not restrict modification of the
-portions of the Library contained in the Combined Work and reverse
-engineering for debugging such modifications, if you also do each of
-the following:
-
-a) Give prominent notice with each copy of the Combined Work that
-the Library is used in it and that the Library and its use are
-covered by this License.
-
-b) Accompany the Combined Work with a copy of the GNU GPL and this license
-document.
-
-c) For a Combined Work that displays copyright notices during
-execution, include the copyright notice for the Library among
-these notices, as well as a reference directing the user to the
-copies of the GNU GPL and this license document.
-
-d) Do one of the following:
-
- 0) Convey the Minimal Corresponding Source under the terms of this
- License, and the Corresponding Application Code in a form
- suitable for, and under terms that permit, the user to
- recombine or relink the Application with a modified version of
- the Linked Version to produce a modified Combined Work, in the
- manner specified by section 6 of the GNU GPL for conveying
- Corresponding Source.
-
- 1) Use a suitable shared library mechanism for linking with the
- Library. A suitable mechanism is one that (a) uses at run time
- a copy of the Library already present on the user's computer
- system, and (b) will operate properly with a modified version
- of the Library that is interface-compatible with the Linked
- Version.
-
-e) Provide Installation Information, but only if you would otherwise
-be required to provide such information under section 6 of the
-GNU GPL, and only to the extent that such information is
-necessary to install and execute a modified version of the
-Combined Work produced by recombining or relinking the
-Application with a modified version of the Linked Version. (If
-you use option 4d0, the Installation Information must accompany
-the Minimal Corresponding Source and Corresponding Application
-Code. If you use option 4d1, you must provide the Installation
-Information in the manner specified by section 6 of the GNU GPL
-for conveying Corresponding Source.)
-
-5. Combined Libraries.
-
-You may place library facilities that are a work based on the
-Library side by side in a single library together with other library
-facilities that are not Applications and are not covered by this
-License, and convey such a combined library under terms of your
-choice, if you do both of the following:
-
-a) Accompany the combined library with a copy of the same work based
-on the Library, uncombined with any other library facilities,
-conveyed under the terms of this License.
-
-b) Give prominent notice with the combined library that part of it
-is a work based on the Library, and explaining where to find the
-accompanying uncombined form of the same work.
-
-6. Revised Versions of the GNU Lesser General Public License.
-
-The Free Software Foundation may publish revised and/or new versions
-of the GNU Lesser General Public License from time to time. Such new
-versions will be similar in spirit to the present version, but may
-differ in detail to address new problems or concerns.
-
-Each version is given a distinguishing version number. If the
-Library as you received it specifies that a certain numbered version
-of the GNU Lesser General Public License "or any later version"
-applies to it, you have the option of following the terms and
-conditions either of that published version or of any later version
-published by the Free Software Foundation. If the Library as you
-received it does not specify a version number of the GNU Lesser
-General Public License, you may choose any version of the GNU Lesser
-General Public License ever published by the Free Software Foundation.
-
-If the Library as you received it specifies that a proxy can decide
-whether future versions of the GNU Lesser General Public License shall
-apply, that proxy's public statement of acceptance of any version is
-permanent authorization for you to choose that version for the
-Library.
+Software: Jetstream
+License: Apache 2.0
+Licensor: Jetstream Solutions, LLC
diff --git a/README.md b/README.md
index 6e02e6c7a..54b1457f7 100644
--- a/README.md
+++ b/README.md
@@ -8,20 +8,20 @@ The Jetstream platform makes managing your Salesforce instances a breeze. Use Je
Learn more by [reading the docs](https://docs.getjetstream.app/).
-**JETSTREAM IS OPEN SOURCE AND FREE TO USE. IF YOUR COMPANY IS GETTING VALUE, PLEASE CONSIDER SPONSORING THE PROJECT ❤️**
+**JETSTREAM IS SOURCE-AVAILABLE AND FREE TO USE. IF YOUR COMPANY IS GETTING VALUE, CONSIDER SPONSORING THE PROJECT ❤️**
+
+Jetstream wouldn't be possible without your contributions.
[![](https://img.shields.io/static/v1?label=Sponsor&message=%E2%9D%A4&logo=GitHub&color=%23fe8e86)](https://github.com/sponsors/jetstreamapp)
There are multiple ways to use Jetstream.
1. Use the hosted version at https://getjetstream.app
-2. Use the desktop version **TODO: COMING SOON**
-3. Run locally
+2. Run locally
1. Using nodejs
1. Building yourself (recommended if you want to contribute to the Jetstream codebase)
- 2. Using the pre-built version **TODO: COMING SOON**
2. Using Docker
-4. Want to self-host behind your company firewall? Reach out to the team for assistance.
+3. Want to self-host behind your company firewall? Reach out to the team for assistance.
# Overview of the codebase structure
@@ -42,7 +42,6 @@ This project was generated using [Nx](https://nx.dev) - This repository is consi
│ ├── jetstream-e2e
│ ├── jetstream-worker
│ ├── landing (LANDING PAGE WEBSITE)
-│ ├── landing-e2e
│ ├── maizzle (EMAIL TEMPLATE GENERATION)
│ └── ui-e2e
├── build (DESKTOP BUILD)
@@ -74,13 +73,39 @@ This project was generated using [Nx](https://nx.dev) - This repository is consi
**Pre-req**
-1. Make sure you have node 16 or 18 installed.
-2. If you want to run the dev server, make sure you have yarn installed.
+1. Make sure you have node 20 installed.
+2. If you are using docker, make sure you have Docker installed.
+3. If you want to run the dev server, make sure you have yarn version 1 installed.
📓 You can choose to skip authentication locally by setting the environment variable `EXAMPLE_USER_OVERRIDE=true`. This is set to true by default in the `.env.example` file.
🌟 To use this, don't click the login button, but instead just go to `http://localhost:3333/app` or `http://localhost:4200/app` (if running the react development server) directly.
-The easiest way to run Jetstream locally is to download the pre-built and transpiled javascript files and run them using NodeJs.
+### Using Docker
+
+If you have docker and just want to run the application locally, using docker is the easiest option.
+
+Build the docker image (this takes a while the first time).
+
+```shell
+docker build -t jetstream-app .
+```
+
+Use docker compose to create a dockerized postgres database and run the app.
+
+```shell
+docker compose up
+```
+
+- Jetstream will be running at `http://localhost:3333`
+- Postgres will be running on port `5555` if you wanted to connect to it locally.
+- When you click "Login", you should immediately be logged in without having to sign in.
+ - You can set `EXAMPLE_USER_OVERRIDE` if you want to disable this behavior
+- If assets on the page don't load, do a hard refresh (hold cmd or shift and press refresh)
+ - This might happen if you have re-built the image and the browser has cached the page with now missing resources.
+
+### Running without Docker
+
+Use this option if you want to contribute to the codebase.
Jetstream relies on a Postgres database, so you either need to [run Postgresql locally](https://www.postgresql.org/download/) or use a managed provider such as one from the list below. Optionally you can run jetstream in a Docker container which includes Postgresql.
@@ -107,12 +132,6 @@ If you want to create your own:
3. All other defaults are fine
3. Update the file named `.env` and replace `SFDC_CONSUMER_KEY` and `SFDC_CONSUMER_SECRET` with the values from your connected app.
-### Download pre-built application
-
-This is the fastest 🏃 way to run Jetstream locally.
-
-TODO: instructions to download and instructions to run
-
### Building
⭐ If you want to contribute to Jetstream, this is the best option.
@@ -167,7 +186,7 @@ TODO: instructions to download and instructions to run
## Desktop Application
-**TODO: THIS HAS NOT BEEN ENTIRELY WORKED OUT YET**
+This is a work in progress and may be removed as an available option.
### Local development
diff --git a/apps/api/.env.development b/apps/api/.env.development
new file mode 100644
index 000000000..49b79c58a
--- /dev/null
+++ b/apps/api/.env.development
@@ -0,0 +1,21 @@
+ENVIRONMENT="development"
+
+AUTH0_DOMAIN="getjetstream-dev.us.auth0.com"
+AUTH0_M2M_DOMAIN="getjetstream-dev.us.auth0.com"
+
+CONTENTFUL_HOST="https://api.contentful.com"
+
+GOOGLE_REDIRECT_URI="http://localhost:3333/oauth/google/callback"
+
+HONEYCOMB_ENABLED=false
+
+JETSTREAM_CLIENT_URL="http://localhost:4200/app"
+JETSTREAM_SERVER_DOMAIN="localhost:3333"
+JETSTREAM_SERVER_URL="http://localhost:3333"
+
+NX_AUTH_AUDIENCE="http://getjetstream.app/app_metadata"
+NX_BRANCH="main"
+NX_SFDC_API_VERSION="60.0"
+
+SFDC_API_VERSION="60.0"
+SFDC_CALLBACK_URL="http://localhost:3333/oauth/sfdc/callback"
diff --git a/apps/api/.env.production b/apps/api/.env.production
new file mode 100644
index 000000000..19a14262a
--- /dev/null
+++ b/apps/api/.env.production
@@ -0,0 +1,21 @@
+ENVIRONMENT="production"
+
+AUTH0_DOMAIN="auth.getjetstream.app"
+AUTH0_M2M_DOMAIN="getjetstream.us.auth0.com"
+
+CONTENTFUL_HOST="cdn.contentful.com"
+
+GOOGLE_REDIRECT_URI="https://getjetstream.app/oauth/google/callback"
+
+HONEYCOMB_ENABLED=true
+
+JETSTREAM_CLIENT_URL="https://getjetstream.app/app"
+JETSTREAM_SERVER_DOMAIN="getjetstream.app"
+JETSTREAM_SERVER_URL="https://getjetstream.app"
+
+NX_AUTH_AUDIENCE="http://getjetstream.app/app_metadata"
+NX_BRANCH="main"
+NX_SFDC_API_VERSION="59.0"
+
+SFDC_API_VERSION="59.0"
+SFDC_CALLBACK_URL="https://getjetstream.app/oauth/sfdc/callback"
diff --git a/apps/api/.eslintrc.json b/apps/api/.eslintrc.json
index 1df99e54c..9d9c0db55 100644
--- a/apps/api/.eslintrc.json
+++ b/apps/api/.eslintrc.json
@@ -1,13 +1,9 @@
{
- "extends": "../../.eslintrc.json",
- "rules": {},
+ "extends": ["../../.eslintrc.json"],
"ignorePatterns": ["!**/*"],
"overrides": [
{
"files": ["*.ts", "*.tsx", "*.js", "*.jsx"],
- "parserOptions": {
- "project": ["apps/api/tsconfig.*?.json"]
- },
"rules": {}
},
{
diff --git a/apps/api/Dockerfile b/apps/api/Dockerfile
new file mode 100644
index 000000000..ec590fa5a
--- /dev/null
+++ b/apps/api/Dockerfile
@@ -0,0 +1,24 @@
+# This file is generated by Nx.
+#
+# Build the docker image with `npx nx docker-build api`.
+# Tip: Modify "docker-build" options in project.json to change docker build args.
+#
+# Run the container with `docker run -p 3333:3333 -t api`.
+FROM docker.io/node:lts-alpine
+
+ENV HOST=0.0.0.0
+ENV PORT=3333
+
+WORKDIR /app
+
+RUN addgroup --system api && \
+ adduser --system -G api api
+
+COPY dist/apps/api api
+RUN chown -R api:api .
+
+# You can remove this install step if you build with `--bundle` option.
+# The bundled output will include external dependencies.
+RUN npm --prefix api --omit=dev -f install
+
+CMD [ "node", "api" ]
diff --git a/apps/api/jest.config.ts b/apps/api/jest.config.ts
index cd65436e0..6537324d2 100644
--- a/apps/api/jest.config.ts
+++ b/apps/api/jest.config.ts
@@ -1,8 +1,11 @@
/* eslint-disable */
export default {
- coverageDirectory: '../../coverage/apps/api',
- globals: {},
- displayName: 'api',
- testEnvironment: 'node',
+ displayName: 'server',
preset: '../../jest.preset.js',
+ testEnvironment: 'node',
+ transform: {
    '^.+\\.[tj]s$': ['ts-jest', { tsconfig: '<rootDir>/tsconfig.spec.json' }],
+ },
+ moduleFileExtensions: ['ts', 'js', 'html'],
+ coverageDirectory: '../../coverage/apps/server',
};
diff --git a/apps/api/project.json b/apps/api/project.json
index 120b201fc..00bd07d06 100644
--- a/apps/api/project.json
+++ b/apps/api/project.json
@@ -3,82 +3,95 @@
"$schema": "../../node_modules/nx/schemas/project-schema.json",
"sourceRoot": "apps/api/src",
"projectType": "application",
- "prefix": "api",
- "generators": {},
"targets": {
"build": {
- "executor": "@nrwl/webpack:webpack",
+ "executor": "@nx/esbuild:esbuild",
+ "outputs": ["{options.outputPath}"],
+ "defaultConfiguration": "production",
"options": {
+ "platform": "node",
"outputPath": "dist/apps/api",
+ "format": ["cjs"],
+ "bundle": true,
"main": "apps/api/src/main.ts",
"tsConfig": "apps/api/tsconfig.app.json",
- "assets": ["apps/api/src/assets"],
+ "assets": [
+ {
+ "glob": "**/*",
+ "input": "apps/api/src/assets",
+ "output": "assets",
+ "ignore": [".gitkeep"]
+ }
+ ],
"generatePackageJson": true,
- "webpackConfig": "webpack-server.config.js",
- "target": "node",
- "compiler": "tsc"
+ "sourcemap": true,
+ "esbuildOptions": {
+ "sourcemap": true,
+ "outExtension": {
+ ".js": ".js"
+ }
+ }
},
"configurations": {
+ "development": {
+ "inspect": true
+ },
"production": {
- "optimization": true,
- "extractLicenses": true,
- "inspect": false,
- "sourceMap": true,
- "fileReplacements": [
+ "assets": [
{
- "replace": "apps/api/src/environments/environment.ts",
- "with": "apps/api/src/environments/environment.prod.ts"
+ "glob": "**/*",
+ "input": "apps/api/src/assets",
+ "output": "assets",
+ "ignore": [".gitkeep"]
}
- ]
- },
- "docker": {
- "optimization": false,
- "extractLicenses": false,
- "inspect": true,
- "sourceMap": true,
- "fileReplacements": [
- {
- "replace": "apps/api/src/environments/environment.ts",
- "with": "apps/api/src/environments/environment.prod.ts"
+ ],
+ "esbuildOptions": {
+ "sourcemap": true,
+ "outExtension": {
+ ".js": ".js"
}
- ]
- },
- "test": {
- "optimization": false,
- "extractLicenses": false,
- "inspect": true,
+ },
"fileReplacements": [
{
"replace": "apps/api/src/environments/environment.ts",
- "with": "apps/api/src/environments/environment.test.ts"
+ "with": "apps/api/src/environments/environment.prod.ts"
}
]
}
- },
- "outputs": ["{options.outputPath}"]
+ }
},
"serve": {
- "executor": "@nrwl/node:node",
+ "executor": "@nx/js:node",
+ "defaultConfiguration": "development",
"options": {
"buildTarget": "api:build",
- "inspect": true,
+ "inspect": "inspect",
"port": 7777
+ },
+ "configurations": {
+ "development": {
+ "buildTarget": "api:build:development"
+ },
+ "production": {
+ "buildTarget": "api:build:production"
+ }
}
},
"lint": {
- "executor": "@nrwl/linter:eslint",
- "options": {
- "lintFilePatterns": ["apps/api/**/*.ts", "apps/api/**/*.spec.ts", "apps/api/**/*.d.ts"]
- }
+ "executor": "@nx/eslint:lint",
+ "outputs": ["{options.outputFile}"]
},
"test": {
- "executor": "@nrwl/jest:jest",
+ "executor": "@nx/jest:jest",
+ "outputs": ["{workspaceRoot}/coverage/{projectRoot}"],
"options": {
- "jestConfig": "apps/api/jest.config.ts",
- "passWithNoTests": true
- },
- "outputs": ["{workspaceRoot}/coverage/apps/api"]
+ "jestConfig": "apps/api/jest.config.ts"
+ }
+ },
+ "docker-build": {
+ "dependsOn": ["build"],
+ "command": "docker build -f apps/api/Dockerfile . -t api"
}
},
- "tags": []
+ "tags": ["server"]
}
diff --git a/apps/api/src/app/controllers/auth.controller.ts b/apps/api/src/app/controllers/auth.controller.ts
index b54f8fb55..4671cca49 100644
--- a/apps/api/src/app/controllers/auth.controller.ts
+++ b/apps/api/src/app/controllers/auth.controller.ts
@@ -30,17 +30,17 @@ export async function login(req: Request, res: Response) {
const user = req.user as UserProfileServer;
req.logIn(user, async (err) => {
if (err) {
- logger.warn('[AUTH][ERROR] Error logging in %o', err);
+ logger.warn('[AUTH][ERROR] Error logging in %o', err, { requestId: res.locals.requestId });
return res.redirect('/');
}
createOrUpdateUser(user)
.then(async ({ user: _user }) => {
- logger.info('[AUTH][SUCCESS] Logged in %s', _user.email, { userId: user.id });
+ logger.info('[AUTH][SUCCESS] Logged in %s', _user.email, { userId: user.id, requestId: res.locals.requestId });
res.redirect(ENV.JETSTREAM_CLIENT_URL!);
})
.catch((err) => {
- logger.error('[AUTH][DB][ERROR] Error creating or sending welcome email %o', err);
+ logger.error('[AUTH][DB][ERROR] Error creating or sending welcome email %o', err, { requestId: res.locals.requestId });
res.redirect('/');
});
});
@@ -59,28 +59,28 @@ export async function callback(req: Request, res: Response, next: NextFunction)
},
(err, user, info) => {
if (err) {
- logger.warn('[AUTH][ERROR] Error with authentication %o', err);
+ logger.warn('[AUTH][ERROR] Error with authentication %o', err, { requestId: res.locals.requestId });
return next(new AuthenticationError(err));
}
if (!user) {
- logger.warn('[AUTH][ERROR] no user');
- logger.warn('[AUTH][ERROR] no info %o', info);
+ logger.warn('[AUTH][ERROR] no user', { requestId: res.locals.requestId });
+ logger.warn('[AUTH][ERROR] no info %o', info, { requestId: res.locals.requestId });
return res.redirect('/oauth/login');
}
req.logIn(user, async (err) => {
if (err) {
- logger.warn('[AUTH][ERROR] Error logging in %o', err);
+ logger.warn('[AUTH][ERROR] Error logging in %o', err, { requestId: res.locals.requestId });
return next(new AuthenticationError(err));
}
createOrUpdateUser(user).catch((err) => {
- logger.error('[AUTH][DB][ERROR] Error creating or sending welcome email %o', err);
+ logger.error('[AUTH][DB][ERROR] Error creating or sending welcome email %o', err, { requestId: res.locals.requestId });
});
// TODO: confirm returnTo 0 it suddenly was reported as bad
const returnTo = (req.session as any).returnTo;
delete (req.session as any).returnTo;
- logger.info('[AUTH][SUCCESS] Logged in %s', user.email, { userId: user.id });
+ logger.info('[AUTH][SUCCESS] Logged in %s', user.email, { userId: user.id, requestId: res.locals.requestId });
res.redirect(returnTo || ENV.JETSTREAM_CLIENT_URL);
});
}
@@ -115,14 +115,14 @@ export async function linkCallback(req: Request, res: Response, next: NextFuncti
clientUrl: new URL(ENV.JETSTREAM_CLIENT_URL!).origin,
};
if (err) {
- logger.warn('[AUTH][LINK][ERROR] Error with authentication %o', err);
+ logger.warn('[AUTH][LINK][ERROR] Error with authentication %o', err, { requestId: res.locals.requestId });
params.error = isString(err) ? err : err.message || 'Unknown Error';
params.message = (req.query.error_description as string) || undefined;
return res.redirect(`/oauth-link/?${new URLSearchParams(params as any).toString()}`);
}
if (!userProfile) {
- logger.warn('[AUTH][LINK][ERROR] no user');
- logger.warn('[AUTH][LINK][ERROR] no info %o', info);
+ logger.warn('[AUTH][LINK][ERROR] no user', { requestId: res.locals.requestId });
+ logger.warn('[AUTH][LINK][ERROR] no info %o', info, { requestId: res.locals.requestId });
params.error = 'Authentication Error';
params.message = (req.query.error_description as string) || undefined;
return res.redirect(`/oauth-link/?${new URLSearchParams(params as any).toString()}`);
@@ -140,12 +140,13 @@ export async function linkCallback(req: Request, res: Response, next: NextFuncti
logger.warn('[AUTH0][IDENTITY][LINK][ERROR] Failed to delete the secondary user orgs %s', userProfile.user_id, {
userId: user.id,
secondaryUserId: userProfile.user_id,
+ requestId: res.locals.requestId,
});
}
return res.redirect(`/oauth-link/?${new URLSearchParams(params as any).toString()}`);
} catch (ex) {
- logger.warn('[AUTH][LINK][ERROR] Error linking account %o', err);
+ logger.warn('[AUTH][LINK][ERROR] Error linking account %o', err, { requestId: res.locals.requestId });
params.error = 'Unexpected Error';
return res.redirect(`/oauth-link/?${new URLSearchParams(params as any).toString()}&clientUrl=${ENV.JETSTREAM_CLIENT_URL}`);
}
diff --git a/apps/api/src/app/controllers/oauth.controller.ts b/apps/api/src/app/controllers/oauth.controller.ts
index 6cac681d8..765082007 100644
--- a/apps/api/src/app/controllers/oauth.controller.ts
+++ b/apps/api/src/app/controllers/oauth.controller.ts
@@ -1,5 +1,5 @@
import { ENV, logger } from '@jetstream/api-config';
-import { SalesforceOrgUi, SObjectOrganization, UserProfileServer } from '@jetstream/types';
+import { SObjectOrganization, SalesforceOrgUi, UserProfileServer } from '@jetstream/types';
import * as express from 'express';
import * as jsforce from 'jsforce';
import * as salesforceOrgsDb from '../db/salesforce-org.db';
@@ -51,7 +51,7 @@ export async function salesforceOauthCallback(req: express.Request, res: express
returnParams.message = req.query.error_description
? (req.query.error_description as string)
: 'There was an error authenticating with Salesforce.';
- logger.info('[OAUTH][ERROR] %s', req.query.error, { ...req.query });
+ logger.info('[OAUTH][ERROR] %s', req.query.error, { ...req.query, requestId: res.locals.requestId });
return res.redirect(`/oauth-link/?${new URLSearchParams(returnParams as any).toString()}`);
}
@@ -78,7 +78,7 @@ export async function salesforceOauthCallback(req: express.Request, res: express
return res.redirect(`/oauth-link/?${new URLSearchParams(returnParams as any).toString()}`);
} catch (ex) {
const userInfo = req.user ? { username: (req.user as any)?.displayName, userId: (req.user as any)?.user_id } : undefined;
- logger.info('[OAUTH][ERROR] %o', ex.message, { userInfo });
+ logger.info('[OAUTH][ERROR] %o', ex.message, { userInfo, requestId: res.locals.requestId });
returnParams.error = ex.message || 'Unexpected Error';
returnParams.message = req.query.error_description
? (req.query.error_description as string)
@@ -109,7 +109,7 @@ export async function initConnectionFromOAuthResponse({
companyInfoRecord = results.records[0];
}
} catch (ex) {
- logger.warn(ex);
+ logger.warn('Error getting org info %o', ex);
}
const orgName = companyInfoRecord?.Name || 'Unknown Organization';
diff --git a/apps/api/src/app/controllers/orgs.controller.ts b/apps/api/src/app/controllers/orgs.controller.ts
index 330dbab67..c0915ce85 100644
--- a/apps/api/src/app/controllers/orgs.controller.ts
+++ b/apps/api/src/app/controllers/orgs.controller.ts
@@ -58,10 +58,10 @@ export async function checkOrgHealth(req: Request, res: Response, next: NextFunc
try {
await conn.identity();
connectionError = null;
- logger.warn('[ORG CHECK][VALID ORG]');
+ logger.warn('[ORG CHECK][VALID ORG]', { requestId: res.locals.requestId });
} catch (ex) {
connectionError = ERROR_MESSAGES.SFDC_EXPIRED_TOKEN;
- logger.warn('[ORG CHECK][INVALID ORG] %s', ex.message);
+ logger.warn('[ORG CHECK][INVALID ORG] %s', ex.message, { requestId: res.locals.requestId });
}
try {
@@ -69,7 +69,7 @@ export async function checkOrgHealth(req: Request, res: Response, next: NextFunc
await salesforceOrgsDb.updateOrg_UNSAFE(org, { connectionError });
}
} catch (ex) {
- logger.warn('[ERROR UPDATING INVALID ORG] %s', ex.message, { error: ex.message, userInfo });
+ logger.warn('[ERROR UPDATING INVALID ORG] %s', ex.message, { error: ex.message, userInfo, requestId: res.locals.requestId });
}
if (connectionError) {
diff --git a/apps/api/src/app/controllers/sf-bulk-api.controller.ts b/apps/api/src/app/controllers/sf-bulk-api.controller.ts
index c5c0940e9..15cd05b33 100644
--- a/apps/api/src/app/controllers/sf-bulk-api.controller.ts
+++ b/apps/api/src/app/controllers/sf-bulk-api.controller.ts
@@ -13,7 +13,7 @@ import { sendJson } from '../utils/response.handlers';
export const routeValidators = {
createJob: [
- body('type').isIn(['INSERT', 'UPDATE', 'UPSERT', 'DELETE', 'QUERY']),
+ body('type').isIn(['INSERT', 'UPDATE', 'UPSERT', 'DELETE', 'QUERY', 'QUERY_ALL']),
body('sObject').isString(),
body('serialMode').optional().isBoolean(),
body('externalIdFieldName').optional().isString(),
@@ -186,7 +186,6 @@ export async function downloadResults(req: Request, res: Response, next: NextFun
let isFirstChunk = true;
csvParseStream.on('data', (data) => {
- console.log('DATA: %o', data);
data = JSON.stringify(data);
if (isFirstChunk) {
isFirstChunk = false;
@@ -197,16 +196,21 @@ export async function downloadResults(req: Request, res: Response, next: NextFun
res.write(data);
});
csvParseStream.on('finish', () => {
- console.log('FINISH');
res.write(']}');
- res.status(200).send();
+ if (!res.headersSent) {
+ res.status(200).send();
+ } else {
+ logger.warn('Response headers already sent. csvParseStream[finish]', { requestId: res.locals.requestId });
+ }
});
csvParseStream.on('error', (err) => {
- logger.warn('Error streaming files from Salesforce. %o', err);
- res.status(400).send();
+ logger.warn('Error streaming files from Salesforce. %o', err, { requestId: res.locals.requestId });
+ if (!res.headersSent) {
+ res.status(400).send();
+ } else {
+ logger.warn('Response headers already sent. csvParseStream[error]', { requestId: res.locals.requestId });
+ }
});
-
- // csvParseStream.pipe(res);
} catch (ex) {
next(new UserFacingError(ex.message));
}
diff --git a/apps/api/src/app/controllers/sf-metadata-tooling.controller.ts b/apps/api/src/app/controllers/sf-metadata-tooling.controller.ts
index 0a12daee4..9161b209f 100644
--- a/apps/api/src/app/controllers/sf-metadata-tooling.controller.ts
+++ b/apps/api/src/app/controllers/sf-metadata-tooling.controller.ts
@@ -1,14 +1,21 @@
import { ENV, logger } from '@jetstream/api-config';
import { HTTP, LOG_LEVELS, MIME_TYPES } from '@jetstream/shared/constants';
import { ensureArray, getValueOrSoapNull, sanitizeForXml, splitArrayToMaxSize, toBoolean } from '@jetstream/shared/utils';
-import { AnonymousApexResponse, AnonymousApexSoapResponse, ApexCompletionResponse, ListMetadataResult, MapOf } from '@jetstream/types';
+import { AnonymousApexResponse, ApexCompletionResponse, ListMetadataResult, MapOf } from '@jetstream/types';
import { NextFunction, Request, Response } from 'express';
import { body, param, query } from 'express-validator';
import type { DeployOptions, RetrieveRequest } from 'jsforce';
import * as jsforce from 'jsforce';
-import * as JSZip from 'jszip';
+import JSZip from 'jszip';
import { isObject, isString, toNumber } from 'lodash';
-import { buildPackageXml, getRetrieveRequestFromListMetadata, getRetrieveRequestFromManifest } from '../services/sf-misc';
+import xml2js from 'xml2js';
+import {
+ SalesforceRequestViaAxiosOptions,
+ buildPackageXml,
+ getRetrieveRequestFromListMetadata,
+ getRetrieveRequestFromManifest,
+ salesforceRequestViaAxios,
+} from '../services/sf-misc';
import { UserFacingError } from '../utils/error-handler';
import { sendJson } from '../utils/response.handlers';
@@ -117,7 +124,7 @@ export async function deployMetadata(req: Request, res: Response, next: NextFunc
const zip = new JSZip();
files.forEach((file) => zip.file(file.fullFilename, file.content));
- const results = await conn.metadata.deploy(zip.generateNodeStream(), req.body.options);
+ const results = await conn.metadata.deploy(zip.generateNodeStream() as any, req.body.options);
sendJson(res, results);
} catch (ex) {
@@ -296,7 +303,7 @@ export async function checkRetrieveStatusAndRedeploy(req: Request, res: Response
sendJson(res, { type: 'deploy', results: correctInvalidXmlResponseTypes(deployResults), zipFile: results.zipFile });
} else {
// Deploy package as-is
- const deployResults = await targetConn.metadata.deploy(oldPackage.generateNodeStream(), deployOptions);
+ const deployResults = await targetConn.metadata.deploy(oldPackage.generateNodeStream() as any, deployOptions);
sendJson(res, { type: 'deploy', results: correctInvalidXmlResponseTypes(deployResults), zipFile: results.zipFile });
}
} else {
@@ -329,7 +336,8 @@ export async function anonymousApex(req: Request, res: Response, next: NextFunct
let { apex, logLevel }: { apex: string; logLevel?: string } = req.body;
logLevel = logLevel || 'FINEST';
const conn: jsforce.Connection = res.locals.jsforceConn;
- const requestOptions: jsforce.RequestInfo = {
+ const requestOptions: SalesforceRequestViaAxiosOptions = {
+ conn,
method: 'POST',
url: `${conn.instanceUrl}/services/Soap/s/${conn.version}`,
headers: {
@@ -381,7 +389,7 @@ export async function anonymousApex(req: Request, res: Response, next: NextFunct
- ${conn.accessToken}
+ {sessionId}
@@ -392,22 +400,27 @@ export async function anonymousApex(req: Request, res: Response, next: NextFunct
`,
};
- const soapResponse = await conn.request(requestOptions, { responseType: 'text/xml' });
- const header = soapResponse['soapenv:Envelope']['soapenv:Header'];
- const body = soapResponse?.['soapenv:Envelope']?.['soapenv:Body']?.executeAnonymousResponse?.result;
- const results: AnonymousApexResponse = {
- debugLog: header?.DebuggingInfo?.debugLog || '',
- result: {
- column: toNumber(getValueOrSoapNull(body.column) || -1),
- compileProblem: getValueOrSoapNull(body.compileProblem) || null,
- compiled: toBoolean(getValueOrSoapNull(body.compiled)) || false,
- exceptionMessage: getValueOrSoapNull(body.exceptionMessage) || null,
- exceptionStackTrace: getValueOrSoapNull(body.exceptionStackTrace) || null,
- line: toNumber(getValueOrSoapNull(body.line)) || -1,
- success: toBoolean(getValueOrSoapNull(body.success)) || false,
- },
- };
- sendJson(res, results);
+ const response = await salesforceRequestViaAxios(requestOptions);
+ if (!response.error) {
+ const soapResponse = await xml2js.parseStringPromise(response.body, { explicitArray: false });
+ const header = soapResponse['soapenv:Envelope']['soapenv:Header'];
+ const body = soapResponse['soapenv:Envelope']?.['soapenv:Body']?.executeAnonymousResponse?.result;
+ const results: AnonymousApexResponse = {
+ debugLog: header?.DebuggingInfo?.debugLog || '',
+ result: {
+ column: toNumber(getValueOrSoapNull(body.column) || -1),
+ compileProblem: getValueOrSoapNull(body.compileProblem) || null,
+ compiled: toBoolean(getValueOrSoapNull(body.compiled)) || false,
+ exceptionMessage: getValueOrSoapNull(body.exceptionMessage) || null,
+ exceptionStackTrace: getValueOrSoapNull(body.exceptionStackTrace) || null,
+ line: toNumber(getValueOrSoapNull(body.line)) || -1,
+ success: toBoolean(getValueOrSoapNull(body.success)) || false,
+ },
+ };
+ sendJson(res, results);
+ } else {
+ next(new UserFacingError(response.errorMessage));
+ }
} catch (ex) {
next(ex);
}
@@ -426,7 +439,7 @@ export async function apexCompletions(req: Request, res: Response, next: NextFun
},
};
- logger.info(requestOptions.url);
+ logger.info('Apex Completion %s', requestOptions.url, { requestId: res.locals.requestId });
const completions = await conn.request(requestOptions);
sendJson(res, completions);
diff --git a/apps/api/src/app/controllers/sf-misc.controller.ts b/apps/api/src/app/controllers/sf-misc.controller.ts
index 1aed1a3bd..2edb7e45c 100644
--- a/apps/api/src/app/controllers/sf-misc.controller.ts
+++ b/apps/api/src/app/controllers/sf-misc.controller.ts
@@ -1,16 +1,13 @@
-import { ORG_VERSION_PLACEHOLDER } from '@jetstream/shared/constants';
import { toBoolean } from '@jetstream/shared/utils';
import { GenericRequestPayload, ManualRequestPayload, ManualRequestResponse } from '@jetstream/types';
-import axios, { AxiosError, AxiosRequestConfig } from 'axios';
import { NextFunction, Request, Response } from 'express';
import { body, query } from 'express-validator';
import * as jsforce from 'jsforce';
import { isObject, isString } from 'lodash';
+import * as request from 'superagent';
+import { salesforceRequestViaAxios } from '../services/sf-misc';
import { UserFacingError } from '../utils/error-handler';
import { sendJson } from '../utils/response.handlers';
-import * as request from 'superagent';
-
-const SESSION_ID_RGX = /\{sessionId\}/i;
export const routeValidators = {
getFrontdoorLoginUrl: [],
@@ -104,75 +101,18 @@ export async function makeJsforceRequest(req: Request, res: Response, next: Next
export async function makeJsforceRequestViaAxios(req: Request, res: Response, next: NextFunction) {
try {
- const { method, headers } = req.body as ManualRequestPayload;
- let { url } = req.body as ManualRequestPayload;
- let { body } = req.body as ManualRequestPayload;
+ const { method, headers, body, url } = req.body as ManualRequestPayload;
const conn: jsforce.Connection = res.locals.jsforceConn;
- url = url.replace(ORG_VERSION_PLACEHOLDER, conn.version);
-
- const config: AxiosRequestConfig = {
+ const response = await salesforceRequestViaAxios({
+ conn,
url,
method,
- baseURL: conn.instanceUrl,
- // X-SFDC-Session is used for some SOAP APIs, such as the bulk api
- headers: { ...(headers || {}), ['Authorization']: `Bearer ${conn.accessToken}`, ['X-SFDC-Session']: conn.accessToken },
- responseType: 'text',
- validateStatus: null,
- timeout: 120000,
- transformResponse: [], // required to avoid automatic json parsing
- };
+ headers,
+ body,
+ });
- if (isString(body) && SESSION_ID_RGX.test(body)) {
- body = body.replace(SESSION_ID_RGX, conn.accessToken);
- }
-
- if (method !== 'GET' && body) {
- config.data = body;
- }
-
- axios
- .request(config)
- .then((response) => {
- sendJson(res, {
- error: response.status < 200 || response.status > 300,
- status: response.status,
- statusText: response.statusText,
- headers: JSON.stringify(response.headers || {}, null, 2),
- body: response.data,
- });
- })
- .catch((error: AxiosError) => {
- if (error.isAxiosError) {
- if (error.response) {
- return sendJson(res, {
- error: true,
- errorMessage: null,
- status: error.response.status,
- statusText: error.response.statusText,
- headers: JSON.stringify(error.response.headers || {}, null, 2),
- body: error.response.data as any,
- });
- } else if (error.request) {
- return sendJson(res, {
- error: true,
- errorMessage: error.message || 'An unknown error has occurred.',
- status: null,
- statusText: null,
- headers: null,
- body: null,
- });
- }
- }
- sendJson(res, {
- error: true,
- errorMessage: error.message || 'An unknown error has occurred, the request was not made.',
- status: null,
- statusText: null,
- headers: null,
- body: null,
- });
- });
+ sendJson(res, response);
} catch (ex) {
next(new UserFacingError(ex.message));
}
diff --git a/apps/api/src/app/controllers/sf-query.controller.ts b/apps/api/src/app/controllers/sf-query.controller.ts
index 297594b65..1f60b9fc8 100644
--- a/apps/api/src/app/controllers/sf-query.controller.ts
+++ b/apps/api/src/app/controllers/sf-query.controller.ts
@@ -1,9 +1,9 @@
import { NextFunction, Request, Response } from 'express';
+import { body, query as queryString } from 'express-validator';
import * as jsforce from 'jsforce';
import * as queryService from '../services/query';
-import { sendJson } from '../utils/response.handlers';
import { UserFacingError } from '../utils/error-handler';
-import { body, query as queryString } from 'express-validator';
+import { sendJson } from '../utils/response.handlers';
export const routeValidators = {
query: [body('query').isString()],
diff --git a/apps/api/src/app/controllers/socket.controller.ts b/apps/api/src/app/controllers/socket.controller.ts
index 4ccfc91fe..67e61d06d 100644
--- a/apps/api/src/app/controllers/socket.controller.ts
+++ b/apps/api/src/app/controllers/socket.controller.ts
@@ -1,23 +1,17 @@
-import { ENV, logger } from '@jetstream/api-config';
+import { logger } from '@jetstream/api-config';
import { UserProfileServer } from '@jetstream/types';
import * as cometdClient from 'cometd-nodejs-client';
import * as express from 'express';
-import { createServer, IncomingMessage } from 'http';
+import { IncomingMessage, createServer } from 'http';
+import { nanoid } from 'nanoid';
import { Server, Socket } from 'socket.io';
import { ExtendedError } from 'socket.io/dist/namespace';
import { DefaultEventsMap } from 'socket.io/dist/typed-events';
-import { nanoid } from 'nanoid';
import { environment } from '../../environments/environment';
import * as socketUtils from '../utils/socket-utils';
-const serverUrl = ENV.JETSTREAM_SERVER_URL;
cometdClient.adapt();
-/**
- * FIXME: https://socket.io/docs/v4/pm2/
- * pm2 will cause issues!
- */
-
let io: Server;
const wrapMiddleware =
diff --git a/apps/api/src/app/controllers/user.controller.ts b/apps/api/src/app/controllers/user.controller.ts
index 54b5257bf..4655d874e 100644
--- a/apps/api/src/app/controllers/user.controller.ts
+++ b/apps/api/src/app/controllers/user.controller.ts
@@ -1,15 +1,16 @@
-import { logger, mailgun } from '@jetstream/api-config';
-import { UserProfileServer } from '@jetstream/types';
+import { ENV, logger, mailgun } from '@jetstream/api-config';
+import { UserProfileAuth0Ui, UserProfileServer, UserProfileUi, UserProfileUiWithIdentities } from '@jetstream/types';
import { AxiosError } from 'axios';
import * as express from 'express';
import { body, query as queryString } from 'express-validator';
import { deleteUserAndOrgs } from '../db/transactions.db';
+import * as userDbService from '../db/user.db';
import * as auth0Service from '../services/auth0';
import { UserFacingError } from '../utils/error-handler';
import { sendJson } from '../utils/response.handlers';
export const routeValidators = {
- updateProfile: [body('name').isString().isLength({ min: 1, max: 255 })],
+ updateProfile: [body('name').isString().isLength({ min: 1, max: 255 }), body('preferences').isObject().optional()],
unlinkIdentity: [queryString('provider').isString().isLength({ min: 1 }), queryString('userId').isString().isLength({ min: 1 })],
resendVerificationEmail: [queryString('provider').isString().isLength({ min: 1 }), queryString('userId').isString().isLength({ min: 1 })],
deleteAccount: [body('reason').isString().optional()],
@@ -45,33 +46,85 @@ export async function emailSupport(req: express.Request, res: express.Response)
}),
'h:Reply-To': 'support@getjetstream.app',
});
- logger.info('[SUPPORT EMAIL][EMAIL SENT] %s', results.id);
+ logger.info('[SUPPORT EMAIL][EMAIL SENT] %s', results.id, { requestId: res.locals.requestId });
sendJson(res);
} catch (ex) {
- logger.error('[SUPPORT EMAIL][ERROR] %s', ex.message || 'An unknown error has occurred.', { userId: user.id });
- logger.error(ex.stack);
+ logger.error('[SUPPORT EMAIL][ERROR] %s', ex.message || 'An unknown error has occurred.', {
+ userId: user.id,
+ requestId: res.locals.requestId,
+ });
+ logger.error('%o', ex.stack, { requestId: res.locals.requestId });
throw new UserFacingError('There was a problem sending the email');
}
}
export async function getUserProfile(req: express.Request, res: express.Response) {
- const user = req.user as UserProfileServer;
- sendJson(res, user._json);
+ const auth0User = req.user as UserProfileServer;
+
+ // use fallback locally and on CI
+ if (ENV.EXAMPLE_USER_OVERRIDE && ENV.EXAMPLE_USER_PROFILE && req.hostname === 'localhost') {
+ sendJson(res, ENV.EXAMPLE_USER_PROFILE);
+ return;
+ }
+
+ const user = await userDbService.findByUserId(auth0User.id);
+ if (!user) {
+ throw new UserFacingError('User not found');
+ }
+ const userProfileUi: UserProfileUi = {
+ ...(auth0User._json as any),
+ id: user.id,
+ userId: user.userId,
+ createdAt: user.createdAt.toISOString(),
+ updatedAt: user.updatedAt.toISOString(),
+ preferences: {
+ skipFrontdoorLogin: user.preferences?.skipFrontdoorLogin,
+ },
+ };
+ sendJson(res, userProfileUi);
+}
+
+async function getFullUserProfileFn(sessionUser: UserProfileServer, auth0User?: UserProfileAuth0Ui) {
+ auth0User = auth0User || (await auth0Service.getUser(sessionUser));
+ const jetstreamUser = await userDbService.findByUserId(sessionUser.id);
+ if (!jetstreamUser) {
+ throw new UserFacingError('User not found');
+ }
+ const response: UserProfileUiWithIdentities = {
+ id: jetstreamUser.id,
+ userId: sessionUser.id,
+ name: jetstreamUser.name || '',
+ email: jetstreamUser.email,
+ emailVerified: auth0User.email_verified,
+ username: auth0User.username || '',
+ nickname: auth0User.nickname,
+ picture: auth0User.picture,
+ preferences: {
+ skipFrontdoorLogin: jetstreamUser.preferences?.skipFrontdoorLogin ?? false,
+ },
+ identities: auth0User.identities,
+ createdAt: jetstreamUser.createdAt.toISOString(),
+ updatedAt: jetstreamUser.updatedAt.toISOString(),
+ };
+ return response;
}
/** Get profile from Auth0 */
export async function getFullUserProfile(req: express.Request, res: express.Response) {
const user = req.user as UserProfileServer;
try {
- const auth0User = await auth0Service.getUser(user);
- sendJson(res, auth0User);
+ const response = await getFullUserProfileFn(user);
+ sendJson(res, response);
} catch (ex) {
if (ex.isAxiosError) {
const error: AxiosError = ex;
if (error.response) {
- logger.error('[AUTH0][PROFILE FETCH][ERROR] %o', error.response.data, { userId: user.id });
+ logger.error('[AUTH0][PROFILE FETCH][ERROR] %o', error.response.data, { userId: user.id, requestId: res.locals.requestId });
} else if (error.request) {
- logger.error('[AUTH0][PROFILE FETCH][ERROR] %s', error.message || 'An unknown error has occurred.', { userId: user.id });
+ logger.error('[AUTH0][PROFILE FETCH][ERROR] %s', error.message || 'An unknown error has occurred.', {
+ userId: user.id,
+ requestId: res.locals.requestId,
+ });
}
}
throw new UserFacingError('There was an error obtaining your profile information');
@@ -80,18 +133,24 @@ export async function getFullUserProfile(req: express.Request, res: express.Resp
export async function updateProfile(req: express.Request, res: express.Response) {
const user = req.user as UserProfileServer;
- const userProfile = { name: req.body.name };
+ const userProfile = req.body as UserProfileUiWithIdentities;
try {
+ // check for name change, if so call auth0 to update
const auth0User = await auth0Service.updateUser(user, userProfile);
- sendJson(res, auth0User);
+ // update name and preferences locally
+ const response = await getFullUserProfileFn(user, auth0User);
+ sendJson(res, response);
} catch (ex) {
if (ex.isAxiosError) {
const error: AxiosError = ex;
if (error.response) {
- logger.error('[AUTH0][PROFILE][ERROR] %o', error.response.data, { userId: user.id });
+ logger.error('[AUTH0][PROFILE][ERROR] %o', error.response.data, { userId: user.id, requestId: res.locals.requestId });
} else if (error.request) {
- logger.error('[AUTH0][PROFILE][ERROR] %s', error.message || 'An unknown error has occurred.', { userId: user.id });
+ logger.error('[AUTH0][PROFILE][ERROR] %s', error.message || 'An unknown error has occurred.', {
+ userId: user.id,
+ requestId: res.locals.requestId,
+ });
}
}
throw new UserFacingError('There was an error updating the user profile');
@@ -105,14 +164,18 @@ export async function unlinkIdentity(req: express.Request, res: express.Response
const userId = req.query.userId as string;
const auth0User = await auth0Service.unlinkIdentity(user, { provider, userId });
- sendJson(res, auth0User);
+ const response = await getFullUserProfileFn(user, auth0User);
+ sendJson(res, response);
} catch (ex) {
if (ex.isAxiosError) {
const error: AxiosError = ex;
if (error.response) {
- logger.error('[AUTH0][UNLINK][ERROR] %o', error.response.data, { userId: user.id });
+ logger.error('[AUTH0][UNLINK][ERROR] %o', error.response.data, { userId: user.id, requestId: res.locals.requestId });
} else if (error.request) {
- logger.error('[AUTH0][UNLINK][ERROR] %s', error.message || 'An unknown error has occurred.', { userId: user.id });
+ logger.error('[AUTH0][UNLINK][ERROR] %s', error.message || 'An unknown error has occurred.', {
+ userId: user.id,
+ requestId: res.locals.requestId,
+ });
}
}
throw new UserFacingError('There was an error unlinking the account');
@@ -124,15 +187,18 @@ export async function resendVerificationEmail(req: express.Request, res: express
const provider = req.query.provider as string;
const userId = req.query.userId as string;
try {
- const auth0User = await auth0Service.resendVerificationEmail(user, { provider, userId });
- sendJson(res, auth0User);
+ await auth0Service.resendVerificationEmail(user, { provider, userId });
+ sendJson(res);
} catch (ex) {
if (ex.isAxiosError) {
const error: AxiosError = ex;
if (error.response) {
- logger.error('[AUTH0][EMAIL VERIFICATION][ERROR] %o', error.response.data, { userId: user.id });
+ logger.error('[AUTH0][EMAIL VERIFICATION][ERROR] %o', error.response.data, { userId: user.id, requestId: res.locals.requestId });
} else if (error.request) {
- logger.error('[AUTH0][EMAIL VERIFICATION][ERROR] %s', error.message || 'An unknown error has occurred.', { userId: user.id });
+ logger.error('[AUTH0][EMAIL VERIFICATION][ERROR] %s', error.message || 'An unknown error has occurred.', {
+ userId: user.id,
+ requestId: res.locals.requestId,
+ });
}
}
throw new UserFacingError('There was an error re-sending the verification email');
@@ -177,19 +243,19 @@ export async function deleteAccount(req: express.Request, res: express.Response)
'h:Reply-To': 'support@getjetstream.app',
})
.then((results) => {
- logger.info('[ACCOUNT DELETE][EMAIL SENT] %s', results.id);
+ logger.info('[ACCOUNT DELETE][EMAIL SENT] %s', results.id, { requestId: res.locals.requestId });
})
.catch((error) => {
- logger.error('[ACCOUNT DELETE][ERROR SENDING EMAIL SUMMARY] %s', error.message);
+ logger.error('[ACCOUNT DELETE][ERROR SENDING EMAIL SUMMARY] %s', error.message, { requestId: res.locals.requestId });
});
} catch (ex) {
- logger.error('[ACCOUNT DELETE][ERROR SENDING EMAIL SUMMARY] %s', ex.message);
+ logger.error('[ACCOUNT DELETE][ERROR SENDING EMAIL SUMMARY] %s', ex.message, { requestId: res.locals.requestId });
}
// Destroy session - don't wait for response
req.session.destroy((error) => {
if (error) {
- logger.error('[ACCOUNT DELETE][ERROR DESTROYING SESSION] %s', error.message);
+ logger.error('[ACCOUNT DELETE][ERROR DESTROYING SESSION] %s', error.message, { requestId: res.locals.requestId });
}
});
@@ -198,9 +264,12 @@ export async function deleteAccount(req: express.Request, res: express.Response)
if (ex.isAxiosError) {
const error: AxiosError = ex;
if (error.response) {
- logger.error('[ACCOUNT DELETE][FATAL ERROR] %o', error.response.data, { userId: user.id });
+ logger.error('[ACCOUNT DELETE][FATAL ERROR] %o', error.response.data, { userId: user.id, requestId: res.locals.requestId });
} else if (error.request) {
- logger.error('[ACCOUNT DELETE][FATAL ERROR] %s', error.message || 'An unknown error has occurred.', { userId: user.id });
+ logger.error('[ACCOUNT DELETE][FATAL ERROR] %s', error.message || 'An unknown error has occurred.', {
+ userId: user.id,
+ requestId: res.locals.requestId,
+ });
}
}
throw new UserFacingError('There was a problem deleting your account, contact support@getjetstream.app for assistance.');
diff --git a/apps/api/src/app/db/user.db.ts b/apps/api/src/app/db/user.db.ts
index 2704ff09e..6200e2b3c 100644
--- a/apps/api/src/app/db/user.db.ts
+++ b/apps/api/src/app/db/user.db.ts
@@ -1,21 +1,57 @@
import { ENV, logger, prisma } from '@jetstream/api-config';
import { UserProfileServer } from '@jetstream/types';
-import { User } from '@prisma/client';
+import { Prisma, User } from '@prisma/client';
+
+const userSelect: Prisma.UserSelect = {
+ appMetadata: true,
+ createdAt: true,
+ email: true,
+ id: true,
+ name: true,
+ nickname: true,
+ picture: true,
+ preferences: {
+ select: {
+ skipFrontdoorLogin: true,
+ },
+ },
+ updatedAt: true,
+ userId: true,
+};
/**
* Find by Auth0 userId, not Jetstream Id
*/
-async function findByUserId(userId: string) {
- return await prisma.user.findUnique({
- where: { userId: userId },
+export async function findByUserId(userId: string) {
+ const user = await prisma.user.findUnique({
+ where: { userId },
+ select: userSelect,
});
+ return user;
}
-export async function updateUser(user: UserProfileServer, data: { name: string }): Promise {
+export async function updateUser(
+ user: UserProfileServer,
+ data: { name: string; preferences: { skipFrontdoorLogin: boolean } }
+): Promise {
try {
+ const existingUser = await prisma.user.findUnique({
+ where: { userId: user.id },
+ select: { id: true, preferences: { select: { skipFrontdoorLogin: true } } },
+ });
+ const skipFrontdoorLogin = data.preferences.skipFrontdoorLogin ?? (existingUser?.preferences?.skipFrontdoorLogin || false);
const updatedUser = await prisma.user.update({
where: { userId: user.id },
- data: { name: data.name },
+ data: {
+ name: data.name,
+ preferences: {
+ upsert: {
+ create: { skipFrontdoorLogin },
+ update: { skipFrontdoorLogin },
+ },
+ },
+ },
+ select: userSelect,
});
return updatedUser;
} catch (ex) {
@@ -36,7 +72,14 @@ export async function createOrUpdateUser(user: UserProfileServer): Promise<{ cre
where: { userId: user.id },
data: {
appMetadata: JSON.stringify(user._json[ENV.AUTH_AUDIENCE!]),
+ preferences: {
+ upsert: {
+ create: { skipFrontdoorLogin: false },
+ update: { skipFrontdoorLogin: false },
+ },
+ },
},
+ select: userSelect,
});
logger.debug('[DB][USER][UPDATED] %s', user.id, { userId: user.id, id: existingUser.id });
return { created: false, user: updatedUser };
@@ -49,7 +92,9 @@ export async function createOrUpdateUser(user: UserProfileServer): Promise<{ cre
nickname: user._json.nickname,
picture: user._json.picture,
appMetadata: JSON.stringify(user._json[ENV.AUTH_AUDIENCE!]),
+ preferences: { create: { skipFrontdoorLogin: false } },
},
+ select: userSelect,
});
logger.debug('[DB][USER][CREATED] %s', user.id, { userId: user.id, id: createdUser.id });
return { created: true, user: createdUser };
diff --git a/apps/api/src/app/routes/api.routes.ts b/apps/api/src/app/routes/api.routes.ts
index 95d8413f8..ef5c817a7 100644
--- a/apps/api/src/app/routes/api.routes.ts
+++ b/apps/api/src/app/routes/api.routes.ts
@@ -1,7 +1,7 @@
import { ENV } from '@jetstream/api-config';
-import * as express from 'express';
+import express from 'express';
import Router from 'express-promise-router';
-import * as multer from 'multer';
+import multer from 'multer';
import * as imageController from '../controllers/image.controller';
import * as orgsController from '../controllers/orgs.controller';
import * as salesforceApiReqController from '../controllers/salesforce-api-requests.controller';
diff --git a/apps/api/src/app/routes/route.middleware.ts b/apps/api/src/app/routes/route.middleware.ts
index 841753e5f..cc9573fe0 100644
--- a/apps/api/src/app/routes/route.middleware.ts
+++ b/apps/api/src/app/routes/route.middleware.ts
@@ -8,11 +8,17 @@ import * as express from 'express';
import { ValidationChain, validationResult } from 'express-validator';
import * as jsforce from 'jsforce';
import { isNumber } from 'lodash';
+import { v4 as uuid } from 'uuid';
import * as salesforceOrgsDb from '../db/salesforce-org.db';
import { updateUserLastActivity } from '../services/auth0';
import { getJsforceOauth2 } from '../utils/auth-utils';
import { AuthenticationError, NotFoundError, UserFacingError } from '../utils/error-handler';
+export function addContextMiddleware(req: express.Request, res: express.Response, next: express.NextFunction) {
+ res.locals.requestId = uuid();
+ next();
+}
+
/**
* Set's cookie that is used by front-end application
* @param req
@@ -34,11 +40,11 @@ export function setApplicationCookieMiddleware(req: express.Request, res: expres
export function logRoute(req: express.Request, res: express.Response, next: express.NextFunction) {
res.locals.path = req.path;
- // logger.info(req.method, req.originalUrl);
const userInfo = req.user ? { username: (req.user as any)?.displayName, userId: (req.user as any)?.user_id } : undefined;
logger.debug('[REQ] %s %s', req.method, req.originalUrl, {
method: req.method,
url: req.originalUrl,
+ requestId: res.locals.requestId,
agent: req.header('User-Agent'),
ip: req.headers[HTTP.HEADERS.CF_Connecting_IP] || req.headers[HTTP.HEADERS.X_FORWARDED_FOR] || req.connection.remoteAddress,
country: req.headers[HTTP.HEADERS.CF_IPCountry],
@@ -77,6 +83,7 @@ export function blockBotByUserAgentMiddleware(req: express.Request, res: express
blocked: true,
method: req.method,
url: req.originalUrl,
+ requestId: res.locals.requestId,
agent: req.header('User-Agent'),
referrer: req.get('Referrer'),
ip: req.headers[HTTP.HEADERS.CF_Connecting_IP] || req.headers[HTTP.HEADERS.X_FORWARDED_FOR] || req.connection.remoteAddress,
@@ -106,23 +113,37 @@ export async function checkAuth(req: express.Request, res: express.Response, nex
// Update auth0 with expiration date
updateUserLastActivity(req.user as UserProfileServer, fromUnixTime(req.session.activityExp))
.then(() => {
- logger.debug('[AUTH][LAST-ACTIVITY][UPDATED] %s', req.session.activityExp, { userId: (req.user as any)?.user_id });
+ logger.debug('[AUTH][LAST-ACTIVITY][UPDATED] %s', req.session.activityExp, {
+ userId: (req.user as any)?.user_id,
+ requestId: res.locals.requestId,
+ });
})
.catch((err) => {
// send error to rollbar
const error: AxiosError = err;
if (error.response) {
- logger.error('[AUTH][LAST-ACTIVITY][ERROR] %o', error.response.data, { userId: (req.user as any)?.user_id });
+ logger.error('[AUTH][LAST-ACTIVITY][ERROR] %o', error.response.data, {
+ userId: (req.user as any)?.user_id,
+ requestId: res.locals.requestId,
+ });
} else if (error.request) {
logger.error('[AUTH][LAST-ACTIVITY][ERROR] %s', error.message || 'An unknown error has occurred.', {
userId: (req.user as any)?.user_id,
+ requestId: res.locals.requestId,
});
}
- rollbarServer.error('Error updating Auth0 activityExp', { message: err.message, stack: err.stack });
+ rollbarServer.error('Error updating Auth0 activityExp', {
+ message: err.message,
+ stack: err.stack,
+ requestId: res.locals.requestId,
+ });
});
}
} catch (ex) {
- logger.warn('[AUTH][LAST-ACTIVITY][ERROR] Exception: %s', ex.message, { userId: (req.user as any)?.user_id });
+ logger.warn('[AUTH][LAST-ACTIVITY][ERROR] Exception: %s', ex.message, {
+ userId: (req.user as any)?.user_id,
+ requestId: res.locals.requestId,
+ });
}
return next();
}
@@ -130,6 +151,7 @@ export async function checkAuth(req: express.Request, res: express.Response, nex
blocked: true,
method: req.method,
url: req.originalUrl,
+ requestId: res.locals.requestId,
agent: req.header('User-Agent'),
ip: req.headers[HTTP.HEADERS.CF_Connecting_IP] || req.headers[HTTP.HEADERS.X_FORWARDED_FOR] || req.connection.remoteAddress,
country: req.headers[HTTP.HEADERS.CF_IPCountry],
@@ -141,7 +163,7 @@ export async function addOrgsToLocal(req: express.Request, res: express.Response
try {
if (req.get(HTTP.HEADERS.X_SFDC_ID) || req.query[HTTP.HEADERS.X_SFDC_ID]) {
res.locals = res.locals || {};
- const results = await getOrgFromHeaderOrQuery(req, HTTP.HEADERS.X_SFDC_ID, HTTP.HEADERS.X_SFDC_API_VERSION);
+ const results = await getOrgFromHeaderOrQuery(req, HTTP.HEADERS.X_SFDC_ID, HTTP.HEADERS.X_SFDC_API_VERSION, res.locals.requestId);
if (results) {
const { org, connection } = results;
res.locals.org = org;
@@ -150,7 +172,12 @@ export async function addOrgsToLocal(req: express.Request, res: express.Response
}
if (req.get(HTTP.HEADERS.X_SFDC_ID_TARGET) || req.query[HTTP.HEADERS.X_SFDC_ID_TARGET]) {
res.locals = res.locals || {};
- const results = await getOrgFromHeaderOrQuery(req, HTTP.HEADERS.X_SFDC_ID_TARGET, HTTP.HEADERS.X_SFDC_API_TARGET_VERSION);
+ const results = await getOrgFromHeaderOrQuery(
+ req,
+ HTTP.HEADERS.X_SFDC_ID_TARGET,
+ HTTP.HEADERS.X_SFDC_API_TARGET_VERSION,
+ res.locals.requestId
+ );
if (results) {
if (results) {
const { org, connection } = results;
@@ -160,7 +187,7 @@ export async function addOrgsToLocal(req: express.Request, res: express.Response
}
}
} catch (ex) {
- logger.warn('[INIT-ORG][ERROR] %o', ex);
+ logger.warn('[INIT-ORG][ERROR] %o', ex, { requestId: res.locals.requestId });
return next(new UserFacingError('There was an error initializing the connection to Salesforce'));
}
@@ -173,18 +200,18 @@ export async function addOrgsToLocal(req: express.Request, res: express.Response
export async function monkeyPatchOrgsToRequest(req: express.Request, res: express.Response, next: express.NextFunction) {
try {
if (req.get(HTTP.HEADERS.X_SFDC_ID) || req.query[HTTP.HEADERS.X_SFDC_ID]) {
- const results = await getOrgFromHeaderOrQuery(req, HTTP.HEADERS.X_SFDC_ID, HTTP.HEADERS.X_SFDC_API_VERSION);
+ const results = await getOrgFromHeaderOrQuery(req, HTTP.HEADERS.X_SFDC_ID, HTTP.HEADERS.X_SFDC_API_VERSION, res.locals.requestId);
if (results) {
const { org, connection } = results;
res.locals = { org, jsforceConn: connection };
(req as any).locals = res.locals;
} else {
- logger.info('[INIT-ORG][ERROR] An org did not exist on locals - Monkey Patch');
+ logger.info('[INIT-ORG][ERROR] An org did not exist on locals - Monkey Patch', { requestId: res.locals.requestId });
return next(new UserFacingError('An org is required for this action'));
}
}
} catch (ex) {
- logger.warn('[INIT-ORG][ERROR] %o', ex);
+ logger.warn('[INIT-ORG][ERROR] %o', ex, { requestId: res.locals.requestId });
return next(new UserFacingError('There was an error initializing the connection to Salesforce'));
}
@@ -193,7 +220,7 @@ export async function monkeyPatchOrgsToRequest(req: express.Request, res: expres
export function ensureOrgExists(req: express.Request, res: express.Response, next: express.NextFunction) {
if (!res.locals?.jsforceConn) {
- logger.info('[INIT-ORG][ERROR] An org did not exist on locals');
+ logger.info('[INIT-ORG][ERROR] An org did not exist on locals', { requestId: res.locals.requestId });
return next(new UserFacingError('An org is required for this action'));
}
next();
@@ -201,7 +228,7 @@ export function ensureOrgExists(req: express.Request, res: express.Response, nex
export function ensureTargetOrgExists(req: express.Request, res: express.Response, next: express.NextFunction) {
if (!res.locals?.targetJsforceConn) {
- logger.info('[INIT-ORG][ERROR] A target org did not exist on locals');
+ logger.info('[INIT-ORG][ERROR] A target org did not exist on locals', { requestId: res.locals.requestId });
return next(new UserFacingError('A target org is required for this action'));
}
next();
@@ -217,7 +244,7 @@ export function ensureTargetOrgExists(req: express.Request, res: express.Respons
* @param headerKey
* @param versionHeaderKey
*/
-export async function getOrgFromHeaderOrQuery(req: express.Request, headerKey: string, versionHeaderKey: string) {
+export async function getOrgFromHeaderOrQuery(req: express.Request, headerKey: string, versionHeaderKey: string, requestId?: string) {
const uniqueId = (req.get(headerKey) || req.query[headerKey]) as string;
// TODO: not yet implemented on the front-end
const apiVersion = (req.get(versionHeaderKey) || req.query[versionHeaderKey]) as string | undefined;
@@ -230,10 +257,16 @@ export async function getOrgFromHeaderOrQuery(req: express.Request, headerKey: s
return;
}
- return getOrgForRequest(user, uniqueId, apiVersion, includeCallOptions);
+ return getOrgForRequest(user, uniqueId, apiVersion, includeCallOptions, requestId);
}
-export async function getOrgForRequest(user: UserProfileServer, uniqueId: string, apiVersion?: string, includeCallOptions?: boolean) {
+export async function getOrgForRequest(
+ user: UserProfileServer,
+ uniqueId: string,
+ apiVersion?: string,
+ includeCallOptions?: boolean,
+ requestId?: string
+) {
const org = await salesforceOrgsDb.findByUniqueId_UNSAFE(user.id, uniqueId);
if (!org) {
throw new UserFacingError('An org was not found with the provided id');
@@ -254,6 +287,7 @@ export async function getOrgForRequest(user: UserProfileServer, uniqueId: string
// http://www.fishofprey.com/2016/03/salesforce-forcecom-ide-superpowers.html
// FIXME: this breaks some orgs
// client: `apex_eclipse/v${apiVersion || org.apiVersion || ENV.SFDC_API_VERSION}`,
+ client: 'jetstream',
},
};
@@ -272,9 +306,9 @@ export async function getOrgForRequest(user: UserProfileServer, uniqueId: string
return;
}
await salesforceOrgsDb.updateAccessToken_UNSAFE(org, accessToken, conn.refreshToken);
- logger.info('[ORG][REFRESH] Org refreshed successfully');
+ logger.info('[ORG][REFRESH] Org refreshed successfully', { requestId });
} catch (ex) {
- logger.error('[ORG][REFRESH] Error saving refresh token', ex);
+ logger.error('[ORG][REFRESH] Error saving refresh token', ex, { requestId });
}
};
diff --git a/apps/api/src/app/services/auth0.ts b/apps/api/src/app/services/auth0.ts
index 347474891..ec13e9861 100644
--- a/apps/api/src/app/services/auth0.ts
+++ b/apps/api/src/app/services/auth0.ts
@@ -1,5 +1,5 @@
import { ENV, logger } from '@jetstream/api-config';
-import { UserProfileAuth0Identity, UserProfileAuth0Ui, UserProfileServer } from '@jetstream/types';
+import { UserProfileAuth0Identity, UserProfileAuth0Ui, UserProfileServer, UserProfileUiWithIdentities } from '@jetstream/types';
import axios, { AxiosError } from 'axios';
import { addHours, addSeconds, formatISO, isBefore } from 'date-fns';
import * as userDb from '../db/user.db';
@@ -76,10 +76,13 @@ export async function updateUserLastActivity(user: UserProfileServer, lastActivi
).data;
}
-export async function updateUser(user: UserProfileServer, userProfile: { name: string }): Promise {
+export async function updateUser(user: UserProfileServer, userProfile: UserProfileUiWithIdentities): Promise {
await initAuthorizationToken(user);
- // update on Auth0
- await axiosAuth0.patch(`/api/v2/users/${user.id}`, userProfile);
+
+ if (user.displayName !== userProfile.name) {
+ // update on Auth0 if name changed (not allowed for OAuth connections)
+ await axiosAuth0.patch(`/api/v2/users/${user.id}`, { name: userProfile.name });
+ }
// update locally
await userDb.updateUser(user, userProfile);
// re-fetch user from Auth0
diff --git a/apps/api/src/app/services/query.ts b/apps/api/src/app/services/query.ts
index e256d58b9..00585c1ea 100644
--- a/apps/api/src/app/services/query.ts
+++ b/apps/api/src/app/services/query.ts
@@ -2,7 +2,7 @@
import { logger } from '@jetstream/api-config';
import { QueryResults, QueryResultsColumn, QueryResultsColumns } from '@jetstream/api-interfaces';
import type { Connection } from 'jsforce';
-import { parseQuery, Query } from 'soql-parser-js';
+import { Query, parseQuery } from 'soql-parser-js';
import { QueryColumnMetadata, QueryColumnsSfdc } from '../types/types';
/**
@@ -40,7 +40,7 @@ export async function queryRecords(
groupBy: tempColumns.groupBy,
idSelected: tempColumns.idSelected,
keyPrefix: tempColumns.keyPrefix,
- columns: tempColumns.columnMetadata.flatMap((column) => flattenQueryColumn(column)),
+ columns: tempColumns.columnMetadata?.flatMap((column) => flattenQueryColumn(column)),
};
} catch (ex) {
logger.error('Error fetching columns', ex);
@@ -76,7 +76,7 @@ function flattenQueryColumn(column: QueryColumnMetadata, prevColumnPath?: string
if (Array.isArray(column.joinColumns) && column.joinColumns.length > 0) {
if (column.foreignKeyName) {
// Parent Query
- output = output.concat(column.joinColumns.flatMap((joinColumn) => flattenQueryColumn(joinColumn, currColumnPath)));
+ output = output.concat((column.joinColumns || [])?.flatMap((joinColumn) => flattenQueryColumn(joinColumn, currColumnPath)));
} else {
// Child query
output.push({
@@ -92,7 +92,7 @@ function flattenQueryColumn(column: QueryColumnMetadata, prevColumnPath?: string
numberType: column.numberType,
textType: column.textType,
updatable: column.updatable,
- childColumnPaths: column.joinColumns.flatMap((joinColumn) => flattenQueryColumn(joinColumn, currColumnPath)),
+ childColumnPaths: (column.joinColumns || [])?.flatMap((joinColumn) => flattenQueryColumn(joinColumn, currColumnPath)),
});
}
} else {
diff --git a/apps/api/src/app/services/sf-misc.ts b/apps/api/src/app/services/sf-misc.ts
index 2717fb3fe..595212ae6 100644
--- a/apps/api/src/app/services/sf-misc.ts
+++ b/apps/api/src/app/services/sf-misc.ts
@@ -1,12 +1,165 @@
-import { ensureArray, orderObjectsBy } from '@jetstream/shared/utils';
-import { ListMetadataResult, MapOf } from '@jetstream/types';
+import { ENV } from '@jetstream/api-config';
+import { HTTP, ORG_VERSION_PLACEHOLDER } from '@jetstream/shared/constants';
+import { ensureArray, getFullNameFromListMetadata, orderObjectsBy } from '@jetstream/shared/utils';
+import { ListMetadataResult, ManualRequestPayload, ManualRequestResponse, MapOf } from '@jetstream/types';
+import axios, { AxiosError, AxiosRequestConfig } from 'axios';
import type { PackageTypeMembers, RetrieveRequest } from 'jsforce';
-import { get as lodashGet, isObjectLike, isString } from 'lodash';
+import * as jsforce from 'jsforce';
+import { isObjectLike, isString, get as lodashGet } from 'lodash';
import { create as xmlBuilder } from 'xmlbuilder2';
import { UserFacingError } from '../utils/error-handler';
+const SESSION_ID_RGX = /\{sessionId\}/i;
const VALID_PACKAGE_VERSION = /^[0-9]+\.[0-9]+$/;
+export interface SalesforceRequestViaAxiosOptions extends ManualRequestPayload {
+ conn: jsforce.Connection;
+ /**
+ * If true, the function will throw an error if the request fails
+ * @default false
+ */
+ throwIfError?: boolean;
+ /**
+ * If true, the function will attempt to refresh the token and retry the request
+ * @default true
+ */
+ retryOnAuthFailure?: boolean;
+}
+
+/**
+ * Make API call to Salesforce without using JSForce
+ */
+export async function salesforceRequestViaAxios(options: SalesforceRequestViaAxiosOptions): Promise {
+ const { conn, method, headers = {}, throwIfError = false, retryOnAuthFailure = true } = options;
+ let { body, url } = options;
+ try {
+ url = url.replace(ORG_VERSION_PLACEHOLDER, conn.version);
+
+ const config: AxiosRequestConfig = {
+ url,
+ method,
+ baseURL: conn.instanceUrl,
+ // X-SFDC-Session is used for some SOAP APIs, such as the bulk api
+ headers: {
+ [HTTP.HEADERS.CONTENT_TYPE]: HTTP.CONTENT_TYPE.JSON,
+ [HTTP.HEADERS.ACCEPT]: HTTP.CONTENT_TYPE.JSON,
+ ...headers,
+ ['Authorization']: `Bearer ${conn.accessToken}`,
+ ['X-SFDC-Session']: conn.accessToken,
+ },
+ responseType: 'text',
+ // validateStatus: false,
+ timeout: 120000,
+ transformResponse: [], // required to avoid automatic json parsing
+ };
+
+ if (isString(body) && SESSION_ID_RGX.test(body)) {
+ body = body.replace(SESSION_ID_RGX, conn.accessToken);
+ }
+
+ if (method !== 'GET' && body) {
+ config.data = body;
+ }
+
+ const response = await axios.request(config);
+
+ return {
+ error: false,
+ status: response.status,
+ statusText: response.statusText,
+ headers: JSON.stringify(response.headers || {}, null, 2),
+ body: response.data,
+ };
+ } catch (ex) {
+ if (retryOnAuthFailure && ex instanceof AxiosError) {
+ const response = ex.response;
+ if (response.status === 401 || (isString(response.data) && response.data.includes('INVALID_SESSION_ID'))) {
+ // attempt another API call which should auto-refresh and try again
+ try {
+ await refreshAccessToken(conn);
+ } catch (ex) {
+ console.error('Failed to refresh token', ex);
+ }
+ return await salesforceRequestViaAxios({ ...options, retryOnAuthFailure: false });
+ }
+ }
+ if (throwIfError) {
+ throw ex;
+ }
+ if (ex instanceof AxiosError) {
+ const response = ex.response;
+ if (response) {
+ return {
+ error: response.status < 200 || response.status > 300,
+ status: response.status,
+ statusText: response.statusText,
+ headers: JSON.stringify(response.headers || {}, null, 2),
+ body: response.data,
+ };
+ } else if (ex.request) {
+ return {
+ error: true,
+ errorMessage: ex.message || 'An unknown error has occurred.',
+ status: null,
+ statusText: null,
+ headers: null,
+ body: null,
+ };
+ }
+ } else if (ex instanceof Error) {
+ return {
+ error: true,
+ errorMessage: ex.message || 'An unknown error has occurred.',
+ status: null,
+ statusText: null,
+ headers: null,
+ body: null,
+ };
+ }
+ return {
+ error: true,
+ errorMessage: ex?.message || 'An unknown error has occurred, the request was not made.',
+ status: null,
+ statusText: null,
+ headers: null,
+ body: null,
+ };
+ }
+}
+
+export async function refreshAccessToken(conn: jsforce.Connection): Promise {
+ try {
+ const response = await axios.request<{ access_token: string }>({
+ url: '/services/oauth2/token',
+ method: 'POST',
+ baseURL: conn.instanceUrl,
+ // X-SFDC-Session is used for some SOAP APIs, such as the bulk api
+ headers: {
+ [HTTP.HEADERS.CONTENT_TYPE]: HTTP.CONTENT_TYPE.FORM_URL,
+ [HTTP.HEADERS.ACCEPT]: HTTP.CONTENT_TYPE.JSON,
+ },
+ responseType: 'json',
+ timeout: 20000,
+ data: new URLSearchParams({
+ grant_type: 'refresh_token',
+ client_id: ENV.SFDC_CONSUMER_KEY,
+ client_secret: ENV.SFDC_CONSUMER_SECRET,
+ refresh_token: conn.refreshToken,
+ }).toString(),
+ });
+
+ conn.accessToken = response.data.access_token;
+ try {
+ conn.emit('refresh', conn.accessToken, conn.refreshToken);
+ } catch (ex) {
+ console.error('Failed to emit refresh event', ex);
+ }
+ } catch (ex) {
+ console.error('Failed to refresh token', ex);
+ throw ex;
+ }
+}
+
export function buildPackageXml(types: MapOf, version: string, otherFields: MapOf = {}, prettyPrint = true) {
// prettier-ignore
const packageNode = xmlBuilder({ version: '1.0', encoding: 'UTF-8' })
@@ -15,8 +168,14 @@ export function buildPackageXml(types: MapOf, version: str
Object.keys(types).forEach((metadataType) => {
const typesNode = packageNode.ele('types');
if (types[metadataType].length) {
- orderObjectsBy(types[metadataType], 'fullName').forEach(({ fullName }) => {
- typesNode.ele('members').txt(fullName);
+ orderObjectsBy(types[metadataType], 'fullName').forEach(({ fullName, namespacePrefix }) => {
+ typesNode.ele('members').txt(
+ getFullNameFromListMetadata({
+ fullName,
+ metadataType,
+ namespace: namespacePrefix,
+ })
+ );
});
typesNode.ele('name').txt(metadataType);
}
@@ -42,7 +201,13 @@ export function getRetrieveRequestFromListMetadata(types: MapOf {
const members = types[metadataName];
return {
- members: members.map(({ fullName }) => fullName),
+ members: members.map(({ fullName, namespacePrefix }) => {
+ return getFullNameFromListMetadata({
+ fullName,
+ metadataType: metadataName,
+ namespace: namespacePrefix,
+ });
+ }),
name: metadataName,
};
}),
diff --git a/apps/api/src/app/utils/error-handler.ts b/apps/api/src/app/utils/error-handler.ts
index 193310e6b..a135ff033 100644
--- a/apps/api/src/app/utils/error-handler.ts
+++ b/apps/api/src/app/utils/error-handler.ts
@@ -6,7 +6,7 @@ export class UserFacingError extends Error {
constructor(message: string | Error, additionalData?: any) {
if (message instanceof Error) {
if (message.message.startsWith('(res: express.Response, content?: ResponseType, status = 200) {
+ if (res.headersSent) {
+ logger.warn('Response headers already sent', { requestId: res.locals.requestId });
+ try {
+ rollbarServer.warn('Response not handled by sendJson, headers already sent', new Error('headers already sent'), {
+ requestId: res.locals.requestId,
+ });
+ } catch (ex) {
+ logger.error('Error sending to Rollbar', ex, { requestId: res.locals.requestId });
+ }
+ return;
+ }
res.status(status);
return res.json({ data: content || {} });
}
@@ -19,6 +43,7 @@ export function blockBotHandler(req: express.Request, res: express.Response) {
blocked: true,
method: req.method,
url: req.originalUrl,
+ requestId: res.locals.requestId,
agent: req.header('User-Agent'),
referrer: req.get('Referrer'),
ip: req.headers[HTTP.HEADERS.CF_Connecting_IP] || req.headers[HTTP.HEADERS.X_FORWARDED_FOR] || req.connection.remoteAddress,
@@ -37,12 +62,23 @@ export async function uncaughtErrorHandler(err: any, req: express.Request, res:
error: err.message || err,
method: req.method,
url: req.originalUrl,
+ requestId: res.locals.requestId,
agent: req.header('User-Agent'),
ip: req.headers[HTTP.HEADERS.CF_Connecting_IP] || req.headers[HTTP.HEADERS.X_FORWARDED_FOR] || req.connection.remoteAddress,
country: req.headers[HTTP.HEADERS.CF_IPCountry],
...userInfo,
});
+ if (res.headersSent) {
+ logger.warn('Response headers already sent', { requestId: res.locals.requestId });
+ try {
+ rollbarServer.warn('Error not handled by error handler, headers already sent', req, userInfo, err, new Error('headers already sent'));
+ } catch (ex) {
+ logger.error('Error sending to Rollbar', ex, { requestId: res.locals.requestId });
+ }
+ return;
+ }
+
const isJson = (req.get(HTTP.HEADERS.ACCEPT) || '').includes(HTTP.CONTENT_TYPE.JSON);
// If org had a connection error, ensure that the database is updated
@@ -58,7 +94,11 @@ export async function uncaughtErrorHandler(err: any, req: express.Request, res:
const org = res.locals.org as SalesforceOrg;
await salesforceOrgsDb.updateOrg_UNSAFE(org, { connectionError: ERROR_MESSAGES.SFDC_EXPIRED_TOKEN });
} catch (ex) {
- logger.warn('[RESPONSE][ERROR UPDATING INVALID ORG] %s', ex.message, { error: ex.message, userInfo });
+ logger.warn('[RESPONSE][ERROR UPDATING INVALID ORG] %s', ex.message, {
+ error: ex.message,
+ userInfo,
+ requestId: res.locals.requestId,
+ });
}
}
@@ -104,13 +144,13 @@ export async function uncaughtErrorHandler(err: any, req: express.Request, res:
// TODO: clean up everything below this
- logger.error(err.message, { userInfo });
- logger.error(err.stack, { userInfo });
+ logger.error(err.message, { userInfo, requestId: res.locals.requestId });
+ logger.error(err.stack, { userInfo, requestId: res.locals.requestId });
try {
rollbarServer.warn('Error not handled by error handler', req, userInfo, err);
} catch (ex) {
- logger.error('Error sending to Rollbar', ex);
+ logger.error('Error sending to Rollbar', ex, { requestId: res.locals.requestId });
}
const errorMessage = 'There was an error processing the request';
diff --git a/apps/api/src/main.ts b/apps/api/src/main.ts
index 9f2fddefd..19e80d68a 100644
--- a/apps/api/src/main.ts
+++ b/apps/api/src/main.ts
@@ -2,19 +2,27 @@ import '@jetstream/api-config'; // this gets imported first to ensure as some it
import { ENV, logger, pgPool } from '@jetstream/api-config';
import { HTTP, SESSION_EXP_DAYS } from '@jetstream/shared/constants';
import { json, raw, urlencoded } from 'body-parser';
-import * as pgSimple from 'connect-pg-simple';
-import * as cors from 'cors';
-import * as express from 'express';
+import cluster from 'cluster';
+import pgSimple from 'connect-pg-simple';
+import cors from 'cors';
+import express from 'express';
import proxy from 'express-http-proxy';
-import * as session from 'express-session';
-import * as helmet from 'helmet';
-import * as passport from 'passport';
-import * as Auth0Strategy from 'passport-auth0';
+import session from 'express-session';
+import helmet from 'helmet';
+import { cpus } from 'os';
+import passport from 'passport';
+import Auth0Strategy from 'passport-auth0';
import { Strategy as CustomStrategy } from 'passport-custom';
import { join } from 'path';
import { initSocketServer } from './app/controllers/socket.controller';
import { apiRoutes, oauthRoutes, platformEventRoutes, staticAuthenticatedRoutes, testRoutes } from './app/routes';
-import { blockBotByUserAgentMiddleware, logRoute, notFoundMiddleware, setApplicationCookieMiddleware } from './app/routes/route.middleware';
+import {
+ addContextMiddleware,
+ blockBotByUserAgentMiddleware,
+ logRoute,
+ notFoundMiddleware,
+ setApplicationCookieMiddleware,
+} from './app/routes/route.middleware';
import { blockBotHandler, healthCheck, uncaughtErrorHandler } from './app/utils/response.handlers';
import { environment } from './environments/environment';
@@ -24,184 +32,233 @@ declare module 'express-session' {
}
}
-const pgSession = pgSimple(session);
-
-const sessionMiddleware = session({
- store: new pgSession({
- pool: pgPool,
- tableName: 'sessions',
- }),
- cookie: {
- path: '/',
- // httpOnly: true,
- secure: environment.production,
- maxAge: 1000 * 60 * 60 * 24 * SESSION_EXP_DAYS,
- // sameSite: 'strict',
- },
- secret: ENV.JETSTREAM_SESSION_SECRET,
- resave: false,
- saveUninitialized: false,
- // This will extend the cookie expiration date if there is a request of any kind to a logged in user
- rolling: true,
- name: 'sessionid',
-});
-
-passport.serializeUser(function (user, done) {
- done(null, user);
-});
-
-passport.deserializeUser(function (user, done) {
- // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
- done(null, user!);
-});
-
-const passportInitMiddleware = passport.initialize();
-const passportMiddleware = passport.session();
-
-const app = express();
-const httpServer = initSocketServer(app, [sessionMiddleware, passportInitMiddleware, passportMiddleware]);
-
-if (environment.production) {
- app.set('trust proxy', 1); // required for environments such as heroku / {render?}
-}
+// NOTE: render reports more CPUs than are actually available
+const CPU_COUNT = Math.min(cpus().length, 3);
-// Setup session
-app.use(sessionMiddleware);
-
-// app.use(compression());
-app.use(
- helmet({
- contentSecurityPolicy: {
- directives: {
- defaultSrc: [
- "'self'",
- '*.google-analytics.com',
- '*.google.com',
- '*.googleapis.com',
- '*.gstatic.com',
- '*.rollbar.com',
- 'api.amplitude.com',
- 'api.cloudinary.com',
- ],
- baseUri: ["'self'"],
- blockAllMixedContent: [],
- fontSrc: ["'self'", 'https:', "'unsafe-inline'", 'data:', '*.gstatic.com'],
- frameAncestors: ["'self'", '*.google.com'],
- imgSrc: [
- "'self'",
- 'data:',
- '*.cloudinary.com',
- '*.ctfassets.net',
- '*.documentforce.com',
- '*.force.com',
- '*.githubusercontent.com',
- '*.google-analytics.com',
- '*.googletagmanager.com',
- '*.googleusercontent.com',
- '*.gravatar.com',
- '*.gstatic.com',
- '*.salesforce.com',
- '*.wp.com',
- ],
- objectSrc: ["'none'"],
- scriptSrc: [
- "'self'",
- "'sha256-AS526U4qXJy7/SohgsysWUxi77DtcgSmP0hNfTo6/Hs='", // Google Analytics (Docs)
- "'sha256-pOkCIUf8FXwCoKWPXTEJAC2XGbyg3ftSrE+IES4aqEY='", // Google Analytics (Next/React)
- 'blob:',
- '*.google.com',
- '*.gstatic.com',
- '*.google-analytics.com',
- '*.googletagmanager.com',
- ],
- scriptSrcAttr: ["'none'"],
- styleSrc: ["'self'", 'https:', "'unsafe-inline'"],
- upgradeInsecureRequests: [],
- },
- },
- })
-);
+if (ENV.NODE_ENV === 'production' && cluster.isPrimary) {
+ logger.info(`Number of CPUs is ${CPU_COUNT}`);
+ logger.info(`Master ${process.pid} is running`);
-if (ENV.ENVIRONMENT === 'development') {
- /**
- * All analytics go through our server instead of directly to amplitude
- * This ensures that amplitude is not blocked by various browser tools
- */
- app.use('/analytics', cors({ origin: /http:\/\/localhost:[0-9]+$/ }), (req, res) => res.status(200).send('success'));
+ for (let i = 0; i < CPU_COUNT; i++) {
+ cluster.fork();
+ }
+
+ cluster.on('exit', (worker, code, signal) => {
+ logger.info(`worker ${worker.process.pid} died, restarting`, { code, signal });
+ cluster.fork();
+ });
} else {
- /**
- * All analytics go through our server instead of directly to amplitude
- * This ensures that amplitude is not blocked by various browser tools
- */
- app.use('/analytics', proxy('https://api.amplitude.com'));
-}
+ logger.info(`Worker ${process.pid} started`);
-app.use(blockBotByUserAgentMiddleware);
-app.use(setApplicationCookieMiddleware);
-
-/** Manual test user, skip Auth0 completely */
-passport.use(
- 'custom',
- new CustomStrategy(function (req, callback) {
- if (req.hostname !== 'localhost' || !ENV.EXAMPLE_USER_OVERRIDE || !ENV.EXAMPLE_USER) {
- return callback(new Error('Test user not enabled'));
- }
-
- const user = ENV.EXAMPLE_USER;
- req.user = user;
- callback(null, user);
- })
-);
-
-passport.use(
- 'auth0',
- new Auth0Strategy(
- {
- // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
- domain: ENV.AUTH0_DOMAIN!,
- clientID: ENV.AUTH0_CLIENT_ID!,
- clientSecret: ENV.AUTH0_CLIENT_SECRET!,
- callbackURL: `${ENV.JETSTREAM_SERVER_URL}/oauth/callback`,
- },
- (accessToken, refreshToken, extraParams, profile, done) => {
- // accessToken is the token to call Auth0 API (not needed in the most cases)
- // extraParams.id_token has the JSON Web Token
- // profile has all the information from the user
- return done(null, profile);
- }
- )
-);
-
-/** This configuration is used for authorization, not authentication (e.x. link second identity to user) */
-passport.use(
- 'auth0-authz',
- new Auth0Strategy(
- {
- domain: ENV.AUTH0_DOMAIN!,
- clientID: ENV.AUTH0_CLIENT_ID!,
- clientSecret: ENV.AUTH0_CLIENT_SECRET!,
- callbackURL: `${ENV.JETSTREAM_SERVER_URL}/oauth/identity/link/callback`,
+ const pgSession = pgSimple(session);
+
+ const sessionMiddleware = session({
+ store: new pgSession({
+ pool: pgPool,
+ tableName: 'sessions',
+ }),
+ cookie: {
+ path: '/',
+ // httpOnly: true,
+ secure: environment.production,
+ maxAge: 1000 * 60 * 60 * 24 * SESSION_EXP_DAYS,
+ // sameSite: 'strict',
},
- (accessToken, refreshToken, extraParams, profile, done) => {
- // accessToken is the token to call Auth0 API (not needed in the most cases)
- // extraParams.id_token has the JSON Web Token
- // profile has all the information from the user
- return done(null, profile);
- }
- )
-);
-
-app.use(passportInitMiddleware);
-app.use(passportMiddleware);
-
-// proxy must be provided prior to body parser to ensure streaming response
-if (ENV.ENVIRONMENT === 'development') {
- app.use((req: express.Request, res: express.Response, next: express.NextFunction) => {
- if (req.headers.origin?.includes('localhost')) {
- res.setHeader('Access-Control-Allow-Origin', req.headers.origin);
- res.setHeader(
- 'Access-Control-Expose-Headers',
- [
+ secret: ENV.JETSTREAM_SESSION_SECRET,
+ resave: false,
+ saveUninitialized: false,
+ // This will extend the cookie expiration date if there is a request of any kind to a logged in user
+ rolling: true,
+ name: 'sessionid',
+ });
+
+ passport.serializeUser(function (user, done) {
+ done(null, user);
+ });
+
+ passport.deserializeUser(function (user, done) {
+ // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+ done(null, user!);
+ });
+
+ const passportInitMiddleware = passport.initialize();
+ const passportMiddleware = passport.session();
+
+ const app = express();
+ const httpServer = initSocketServer(app, [sessionMiddleware, passportInitMiddleware, passportMiddleware]);
+
+ if (environment.production) {
+ app.set('trust proxy', 1); // required for environments such as heroku / {render?}
+ }
+
+ app.use(addContextMiddleware);
+
+ // Setup session
+ app.use(sessionMiddleware);
+
+ // app.use(compression());
+ app.use(
+ helmet({
+ contentSecurityPolicy: {
+ directives: {
+ defaultSrc: [
+ "'self'",
+ '*.google-analytics.com',
+ '*.google.com',
+ '*.googleapis.com',
+ '*.gstatic.com',
+ '*.rollbar.com',
+ 'api.amplitude.com',
+ 'api.cloudinary.com',
+ ],
+ baseUri: ["'self'"],
+ blockAllMixedContent: [],
+ fontSrc: ["'self'", 'https:', "'unsafe-inline'", 'data:', '*.gstatic.com'],
+ frameAncestors: ["'self'", '*.google.com'],
+ imgSrc: [
+ "'self'",
+ 'data:',
+ '*.cloudinary.com',
+ '*.ctfassets.net',
+ '*.documentforce.com',
+ '*.force.com',
+ '*.githubusercontent.com',
+ '*.google-analytics.com',
+ '*.googletagmanager.com',
+ '*.googleusercontent.com',
+ '*.gravatar.com',
+ '*.gstatic.com',
+ '*.salesforce.com',
+ '*.wp.com',
+ ],
+ objectSrc: ["'none'"],
+ scriptSrc: [
+ "'self'",
+ "'sha256-AS526U4qXJy7/SohgsysWUxi77DtcgSmP0hNfTo6/Hs='", // Google Analytics (Docs)
+ "'sha256-pOkCIUf8FXwCoKWPXTEJAC2XGbyg3ftSrE+IES4aqEY='", // Google Analytics (Next/React)
+ 'blob:',
+ '*.google.com',
+ '*.gstatic.com',
+ '*.google-analytics.com',
+ '*.googletagmanager.com',
+ ],
+ scriptSrcAttr: ["'none'"],
+ styleSrc: ["'self'", 'https:', "'unsafe-inline'"],
+ upgradeInsecureRequests: [],
+ },
+ },
+ })
+ );
+
+ if (ENV.ENVIRONMENT === 'development') {
+ /**
+ * All analytics go through our server instead of directly to amplitude
+ * This ensures that amplitude is not blocked by various browser tools
+ */
+ app.use('/analytics', cors({ origin: /http:\/\/localhost:[0-9]+$/ }), (req, res) => res.status(200).send('success'));
+ } else {
+ /**
+ * All analytics go through our server instead of directly to amplitude
+ * This ensures that amplitude is not blocked by various browser tools
+ */
+ app.use('/analytics', proxy('https://api.amplitude.com'));
+ }
+
+ app.use(blockBotByUserAgentMiddleware);
+ app.use(setApplicationCookieMiddleware);
+
+ /** Manual test user, skip Auth0 completely */
+ passport.use(
+ 'custom',
+ new CustomStrategy(function (req, callback) {
+ if (req.hostname !== 'localhost' || !ENV.EXAMPLE_USER_OVERRIDE || !ENV.EXAMPLE_USER) {
+ return callback(new Error('Test user not enabled'));
+ }
+
+ const user = ENV.EXAMPLE_USER;
+ req.user = user;
+ callback(null, user);
+ })
+ );
+
+ passport.use(
+ 'auth0',
+ new Auth0Strategy(
+ {
+ // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+ domain: ENV.AUTH0_DOMAIN!,
+ clientID: ENV.AUTH0_CLIENT_ID!,
+ clientSecret: ENV.AUTH0_CLIENT_SECRET!,
+ callbackURL: `${ENV.JETSTREAM_SERVER_URL}/oauth/callback`,
+ },
+ (accessToken, refreshToken, extraParams, profile, done) => {
+ // accessToken is the token to call Auth0 API (not needed in the most cases)
+ // extraParams.id_token has the JSON Web Token
+ // profile has all the information from the user
+ return done(null, profile);
+ }
+ )
+ );
+
+ /** This configuration is used for authorization, not authentication (e.x. link second identity to user) */
+ passport.use(
+ 'auth0-authz',
+ new Auth0Strategy(
+ {
+ domain: ENV.AUTH0_DOMAIN!,
+ clientID: ENV.AUTH0_CLIENT_ID!,
+ clientSecret: ENV.AUTH0_CLIENT_SECRET!,
+ callbackURL: `${ENV.JETSTREAM_SERVER_URL}/oauth/identity/link/callback`,
+ },
+ (accessToken, refreshToken, extraParams, profile, done) => {
+ // accessToken is the token to call Auth0 API (not needed in the most cases)
+ // extraParams.id_token has the JSON Web Token
+ // profile has all the information from the user
+ return done(null, profile);
+ }
+ )
+ );
+
+ app.use(passportInitMiddleware);
+ app.use(passportMiddleware);
+
+ // proxy must be provided prior to body parser to ensure streaming response
+ if (ENV.ENVIRONMENT === 'development') {
+ app.use((req: express.Request, res: express.Response, next: express.NextFunction) => {
+ if (req.headers.origin?.includes('localhost')) {
+ res.setHeader('Access-Control-Allow-Origin', req.headers.origin);
+ res.setHeader(
+ 'Access-Control-Expose-Headers',
+ [
+ HTTP.HEADERS.X_LOGOUT,
+ HTTP.HEADERS.X_LOGOUT_URL,
+ HTTP.HEADERS.X_SFDC_ID,
+ HTTP.HEADERS.X_SFDC_API_VERSION,
+ HTTP.HEADERS.X_SFDC_ID_TARGET,
+ HTTP.HEADERS.X_SFDC_API_TARGET_VERSION,
+ HTTP.HEADERS.X_SFDC_ORG_CONNECTION_ERROR,
+ HTTP.HEADERS.X_SFDC_Session,
+ HTTP.HEADERS.X_INCLUDE_CALL_OPTIONS,
+ HTTP.HEADERS.X_CACHE_RESPONSE,
+ HTTP.HEADERS.X_CACHE_KEY,
+ HTTP.HEADERS.X_CACHE_AGE,
+ HTTP.HEADERS.X_CACHE_EXP,
+ ].join(', ')
+ );
+ }
+ next();
+ });
+ app.options(
+ '*',
+ logRoute,
+ (req: express.Request, res: express.Response, next: express.NextFunction) => {
+ res.setHeader('Access-Control-Allow-Credentials', 'true');
+ res.setHeader('Access-Control-Allow-Origin', '*');
+ next();
+ },
+ cors({
+ origin: true,
+ exposedHeaders: [
HTTP.HEADERS.X_LOGOUT,
HTTP.HEADERS.X_LOGOUT_URL,
HTTP.HEADERS.X_SFDC_ID,
@@ -215,125 +272,91 @@ if (ENV.ENVIRONMENT === 'development') {
HTTP.HEADERS.X_CACHE_KEY,
HTTP.HEADERS.X_CACHE_AGE,
HTTP.HEADERS.X_CACHE_EXP,
- ].join(', ')
- );
- }
- next();
- });
- app.options(
- '*',
- logRoute,
- (req: express.Request, res: express.Response, next: express.NextFunction) => {
- res.setHeader('Access-Control-Allow-Credentials', 'true');
- res.setHeader('Access-Control-Allow-Origin', '*');
- next();
- },
- cors({
- origin: true,
- exposedHeaders: [
- HTTP.HEADERS.X_LOGOUT,
- HTTP.HEADERS.X_LOGOUT_URL,
- HTTP.HEADERS.X_SFDC_ID,
- HTTP.HEADERS.X_SFDC_API_VERSION,
- HTTP.HEADERS.X_SFDC_ID_TARGET,
- HTTP.HEADERS.X_SFDC_API_TARGET_VERSION,
- HTTP.HEADERS.X_SFDC_ORG_CONNECTION_ERROR,
- HTTP.HEADERS.X_SFDC_Session,
- HTTP.HEADERS.X_INCLUDE_CALL_OPTIONS,
- HTTP.HEADERS.X_CACHE_RESPONSE,
- HTTP.HEADERS.X_CACHE_KEY,
- HTTP.HEADERS.X_CACHE_AGE,
- HTTP.HEADERS.X_CACHE_EXP,
- ],
- })
- );
- app.use('/platform-event', logRoute, cors({ origin: /http:\/\/localhost:[0-9]+$/ }), platformEventRoutes);
-} else {
- app.use('/platform-event', logRoute, platformEventRoutes);
-}
-
-app.use(raw({ limit: '30mb', type: ['text/csv'] }));
-app.use(raw({ limit: '30mb', type: ['application/zip'] }));
-app.use(json({ limit: '20mb', type: ['json', 'application/csp-report'] }));
-app.use(urlencoded({ extended: true }));
+ ],
+ })
+ );
+ app.use('/platform-event', logRoute, cors({ origin: /http:\/\/localhost:[0-9]+$/ }), platformEventRoutes);
+ } else {
+ app.use('/platform-event', logRoute, platformEventRoutes);
+ }
-app.use('/healthz', healthCheck);
-app.use('/api', logRoute, apiRoutes);
-app.use('/static', logRoute, staticAuthenticatedRoutes); // these are routes that return files or redirect (e.x. NOT JSON)
-app.use('/oauth', logRoute, oauthRoutes); // NOTE: there are also static files with same path
+ app.use(raw({ limit: '30mb', type: ['text/csv'] }));
+ app.use(raw({ limit: '30mb', type: ['application/zip'] }));
+ app.use(json({ limit: '20mb', type: ['json', 'application/csp-report'] }));
+ app.use(urlencoded({ extended: true }));
-if (ENV.ENVIRONMENT !== 'production' || ENV.IS_CI) {
- app.use('/test', logRoute, testRoutes);
-}
+ app.use('/healthz', healthCheck);
+ app.use('/api', logRoute, apiRoutes);
+ app.use('/static', logRoute, staticAuthenticatedRoutes); // these are routes that return files or redirect (e.x. NOT JSON)
+ app.use('/oauth', logRoute, oauthRoutes); // NOTE: there are also static files with same path
-// const server = app.listen(Number(ENV.PORT), () => {
-// logger.info('Listening at http://localhost:' + ENV.PORT);
-// });
+ if (ENV.ENVIRONMENT !== 'production' || ENV.IS_CI) {
+ app.use('/test', logRoute, testRoutes);
+ }
-const server = httpServer.listen(Number(ENV.PORT), () => {
- logger.info('Listening at http://localhost:' + ENV.PORT);
- logger.info('[ENVIRONMENT]: ' + ENV.ENVIRONMENT);
-});
+ // const server = app.listen(Number(ENV.PORT), () => {
+ // logger.info('Listening at http://localhost:' + ENV.PORT);
+ // });
-if (!environment.production) {
- app.use(cors({ origin: /http:\/\/localhost:[0-9]+$/ }));
-}
+ const server = httpServer.listen(Number(ENV.PORT), () => {
+ logger.info('Listening at http://localhost:' + ENV.PORT);
+ logger.info('[ENVIRONMENT]: ' + ENV.ENVIRONMENT);
+ });
-app.use('/codicon.ttf', (req: express.Request, res: express.Response) => {
- res.sendFile(join(__dirname, './assets/js/monaco/vs/base/browser/ui/codicons/codicon/codicon.ttf'), { maxAge: '1m' });
-});
-app.use('/assets', express.static(join(__dirname, './assets'), { maxAge: '1m' }));
-app.use('/fonts', express.static(join(__dirname, './assets/fonts')));
+ if (!environment.production) {
+ app.use(cors({ origin: /http:\/\/localhost:[0-9]+$/ }));
+ }
-if (environment.production || ENV.IS_CI) {
- app.use(express.static(join(__dirname, '../landing/exported')));
- app.use(express.static(join(__dirname, '../jetstream')));
+ app.use('/codicon.ttf', (req: express.Request, res: express.Response) => {
+ res.sendFile(join(__dirname, './assets/js/monaco/vs/base/browser/ui/codicons/codicon/codicon.ttf'), { maxAge: '1m' });
+ });
+ app.use('/assets', express.static(join(__dirname, './assets'), { maxAge: '1m' }));
+ app.use('/fonts', express.static(join(__dirname, './assets/fonts')));
+ app.use(express.static(join(__dirname, '../landing')));
// SERVICE WORKER FOR DOWNLOAD ZIP
app.use('/download-zip.sw.js', (req: express.Request, res: express.Response) => {
res.sendFile(join(__dirname, '../download-zip-sw/download-zip.sw.js'), { maxAge: '1m' });
});
- app.use('/app', logRoute, (req: express.Request, res: express.Response) => {
- res.sendFile(join(__dirname, '../jetstream/index.html'));
- });
-} else {
- // localhost will only use landing page resources
- app.use(express.static(join(__dirname, '../../../dist/apps/landing/exported')));
- app.use('/download-zip.sw.js', (req: express.Request, res: express.Response) => {
- res.sendFile(join(__dirname, '../../../dist/apps/download-zip-sw/download-zip.sw.js'), { maxAge: '1m' });
+
+ if (environment.production || ENV.IS_CI) {
+ app.use(express.static(join(__dirname, '../jetstream')));
+ app.use('/app', (req: express.Request, res: express.Response) => {
+ res.sendFile(join(__dirname, '../jetstream/index.html'));
+ });
+ }
+
+ /**
+ * SEND 418 FOR BLOCKED ROUTES THAT ARE PRODUCED BY BOTS
+ */
+
+ const BOT_ROUTES = [
+ '/_ignition*',
+ '/*.aspx',
+ '/*.env*',
+ '/*.php',
+ '/*.txt',
+ '/*.xml',
+ '/*magento_version',
+ '/*phpinfo*',
+ '/*wp-content*',
+ '/*wp-includes*',
+ '//feed*',
+ '/%20',
+ '/ALFA_DATA*',
+ '/cgi-bin*',
+ '/humans.txt',
+ '/tmp*',
+ '/view-source*',
+ '/wp*',
+ ];
+
+ BOT_ROUTES.forEach((route) => app.use(route, blockBotHandler));
+
+ app.use('*', notFoundMiddleware);
+ app.use(uncaughtErrorHandler);
+
+ server.on('error', (error: Error) => {
+ logger.error('[SERVER][ERROR]', error.message);
+ logger.error(error.stack);
});
}
-
-/**
- * SEND 418 FOR BLOCKED ROUTES THAT ARE PRODUCED BY BOTS
- */
-
-const BOT_ROUTES = [
- '/_ignition*',
- '/*.aspx',
- '/*.env*',
- '/*.php',
- '/*.txt',
- '/*.xml',
- '/*magento_version',
- '/*phpinfo*',
- '/*wp-content*',
- '/*wp-includes*',
- '//feed*',
- '/%20',
- '/ALFA_DATA*',
- '/cgi-bin*',
- '/humans.txt',
- '/tmp*',
- '/view-source*',
- '/wp*',
-];
-
-BOT_ROUTES.forEach((route) => app.use(route, blockBotHandler));
-
-app.use('*', notFoundMiddleware);
-app.use(uncaughtErrorHandler);
-
-server.on('error', (error: Error) => {
- logger.error('[SERVER][ERROR]', error.message);
- logger.error(error.stack);
-});
diff --git a/apps/api/tsconfig.app.json b/apps/api/tsconfig.app.json
index fa2ddf45d..ae0dc35c7 100644
--- a/apps/api/tsconfig.app.json
+++ b/apps/api/tsconfig.app.json
@@ -1,10 +1,10 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
- "allowSyntheticDefaultImports": true,
"outDir": "../../dist/out-tsc",
+ "module": "commonjs",
"types": ["node", "google.accounts", "google.picker", "gapi.auth2", "gapi.client.drive"]
},
- "exclude": ["**/*.spec.ts", "**/*.test.ts", "jest.config.ts"],
- "include": ["**/*.ts"]
+ "exclude": ["jest.config.ts", "src/**/*.spec.ts", "src/**/*.test.ts"],
+ "include": ["src/**/*.ts"]
}
diff --git a/apps/api/tsconfig.json b/apps/api/tsconfig.json
index 3448fd0a6..c1e2dd4e8 100644
--- a/apps/api/tsconfig.json
+++ b/apps/api/tsconfig.json
@@ -1,11 +1,7 @@
{
"extends": "../../tsconfig.base.json",
- "compilerOptions": {
- "strictNullChecks": true,
- "types": ["node", "jest", "express", "google.accounts", "google.picker", "gapi.auth2", "gapi.client.drive"]
- },
- "include": [],
"files": [],
+ "include": [],
"references": [
{
"path": "./tsconfig.app.json"
@@ -13,5 +9,8 @@
{
"path": "./tsconfig.spec.json"
}
- ]
+ ],
+ "compilerOptions": {
+ "esModuleInterop": true
+ }
}
diff --git a/apps/api/tsconfig.spec.json b/apps/api/tsconfig.spec.json
index 148da8555..f6d8ffcc9 100644
--- a/apps/api/tsconfig.spec.json
+++ b/apps/api/tsconfig.spec.json
@@ -5,5 +5,5 @@
"module": "commonjs",
"types": ["jest", "node"]
},
- "include": ["**/*.spec.ts", "**/*.test.ts", "**/*.d.ts", "jest.config.ts"]
+ "include": ["jest.config.ts", "src/**/*.test.ts", "src/**/*.spec.ts", "src/**/*.d.ts"]
}
diff --git a/apps/cron-tasks/project.json b/apps/cron-tasks/project.json
index ce2743c22..3c58b92d6 100644
--- a/apps/cron-tasks/project.json
+++ b/apps/cron-tasks/project.json
@@ -5,7 +5,7 @@
"projectType": "application",
"targets": {
"build": {
- "executor": "@nrwl/webpack:webpack",
+ "executor": "@nx/webpack:webpack",
"outputs": ["{options.outputPath}"],
"options": {
"outputPath": "dist/apps/cron-tasks",
@@ -18,12 +18,17 @@
{
"entryName": "inactive-account-deletion",
"entryPath": "apps/cron-tasks/src/inactive-account-deletion.ts"
+ },
+ {
+ "entryName": "save-analytics-summary",
+ "entryPath": "apps/cron-tasks/src/save-analytics-summary.ts"
}
],
"tsConfig": "apps/cron-tasks/tsconfig.app.json",
"assets": [],
"target": "node",
- "compiler": "tsc"
+ "compiler": "tsc",
+ "webpackConfig": "apps/cron-tasks/webpack.config.js"
},
"configurations": {
"production": {
@@ -40,25 +45,21 @@
}
},
"serve": {
- "executor": "@nrwl/node:node",
+ "executor": "@nx/js:node",
"options": {
"buildTarget": "cron-tasks:build",
"inspect": true
}
},
"lint": {
- "executor": "@nrwl/linter:eslint",
- "outputs": ["{options.outputFile}"],
- "options": {
- "lintFilePatterns": ["apps/cron-tasks/**/*.ts"]
- }
+ "executor": "@nx/eslint:lint",
+ "outputs": ["{options.outputFile}"]
},
"test": {
- "executor": "@nrwl/jest:jest",
+ "executor": "@nx/jest:jest",
"outputs": ["{workspaceRoot}/coverage/apps/cron-tasks"],
"options": {
- "jestConfig": "apps/cron-tasks/jest.config.ts",
- "passWithNoTests": true
+ "jestConfig": "apps/cron-tasks/jest.config.ts"
}
}
},
diff --git a/apps/cron-tasks/src/config/db.config.ts b/apps/cron-tasks/src/config/db.config.ts
index 42688b7da..1154e178f 100644
--- a/apps/cron-tasks/src/config/db.config.ts
+++ b/apps/cron-tasks/src/config/db.config.ts
@@ -11,7 +11,6 @@ if (ENV.PRISMA_DEBUG) {
export const prisma = new PrismaClient({
log,
- rejectOnNotFound: false,
});
export const pgPool = new Pool({
diff --git a/apps/cron-tasks/src/config/env-config.ts b/apps/cron-tasks/src/config/env-config.ts
index d667ba20b..5ba31d295 100644
--- a/apps/cron-tasks/src/config/env-config.ts
+++ b/apps/cron-tasks/src/config/env-config.ts
@@ -35,4 +35,7 @@ export const ENV = {
MAILGUN_API_KEY: process.env.MAILGUN_API_KEY,
MAILGUN_PUBLIC_KEY: process.env.MAILGUN_PUBLIC_KEY,
MAILGUN_WEBHOOK_KEY: process.env.MAILGUN_WEBHOOK_KEY,
+
+ AMPLITUDE_API_KEY: process.env.AMPLITUDE_API_KEY,
+ AMPLITUDE_SECRET_KEY: process.env.AMPLITUDE_SECRET_KEY,
};
diff --git a/apps/cron-tasks/src/save-analytics-summary.ts b/apps/cron-tasks/src/save-analytics-summary.ts
new file mode 100644
index 000000000..d28410623
--- /dev/null
+++ b/apps/cron-tasks/src/save-analytics-summary.ts
@@ -0,0 +1,75 @@
+import { prisma } from './config/db.config';
+import { logger } from './config/logger.config';
+import { getAmplitudeChart } from './utils/amplitude-dashboard-api';
+
+const CHART_IDS = {
+ LOAD: {
+ YEAR: 'rens73dw',
+ MONTH: 'iyt2blcf',
+ WEEK: 'so649gq9',
+ },
+ QUERY: {
+ YEAR: '33tylew6',
+ MONTH: 'icruamqk',
+ WEEK: 'zs6f0dl8',
+ },
+};
+
+(async () => {
+ try {
+ logger.info('[ANALYTICS SUMMARY] Exporting data from amplitude');
+
+ const CHART_LOAD_YEAR = (await getAmplitudeChart(CHART_IDS.LOAD.YEAR)).data.seriesCollapsed[0][0].value;
+ const CHART_LOAD_MONTH = (await getAmplitudeChart(CHART_IDS.LOAD.MONTH)).data.seriesCollapsed[0][0].value;
+ const CHART_LOAD_WEEK = (await getAmplitudeChart(CHART_IDS.LOAD.WEEK)).data.seriesCollapsed[0][0].value;
+
+ const CHART_QUERY_YEAR = (await getAmplitudeChart(CHART_IDS.QUERY.YEAR)).data.seriesCollapsed[0][0].value;
+ const CHART_QUERY_MONTH = (await getAmplitudeChart(CHART_IDS.QUERY.MONTH)).data.seriesCollapsed[0][0].value;
+ const CHART_QUERY_WEEK = (await getAmplitudeChart(CHART_IDS.QUERY.WEEK)).data.seriesCollapsed[0][0].value;
+
+ logger.info('[ANALYTICS SUMMARY] Saving data to database');
+
+ const loadResults = await prisma.analyticsSummary.upsert({
+ create: {
+ type: 'LOAD_SUMMARY',
+ year: CHART_LOAD_YEAR,
+ month: CHART_LOAD_MONTH,
+ week: CHART_LOAD_WEEK,
+ },
+ update: {
+ year: CHART_LOAD_YEAR,
+ month: CHART_LOAD_MONTH,
+ week: CHART_LOAD_WEEK,
+ },
+ where: {
+ type: 'LOAD_SUMMARY',
+ },
+ });
+
+ logger.info('[ANALYTICS SUMMARY] Load data saved', { loadResults });
+
+ const queryResults = await prisma.analyticsSummary.upsert({
+ create: {
+ type: 'QUERY_SUMMARY',
+ year: CHART_QUERY_YEAR,
+ month: CHART_QUERY_MONTH,
+ week: CHART_QUERY_WEEK,
+ },
+ update: {
+ year: CHART_QUERY_YEAR,
+ month: CHART_QUERY_MONTH,
+ week: CHART_QUERY_WEEK,
+ },
+ where: {
+ type: 'QUERY_SUMMARY',
+ },
+ });
+
+ logger.info('[ANALYTICS SUMMARY] Query data saved', { queryResults });
+
+ logger.info('[ANALYTICS SUMMARY] Done');
+ } catch (ex) {
+ logger.error('[ANALYTICS SUMMARY][ERROR] %o', ex.message);
+ logger.error(ex.stack);
+ }
+})();
diff --git a/apps/cron-tasks/src/utils/amplitude-dashboard-api.ts b/apps/cron-tasks/src/utils/amplitude-dashboard-api.ts
new file mode 100644
index 000000000..3327dd85b
--- /dev/null
+++ b/apps/cron-tasks/src/utils/amplitude-dashboard-api.ts
@@ -0,0 +1,21 @@
+import axios from 'axios';
+import { ENV } from '../config/env-config';
+import { logger } from '../config/logger.config';
+import { AmplitudeChartResult } from './types';
+
+const axiosAuth0 = axios.create({
+ baseURL: `https://amplitude.com/api/3`,
+});
+
+const BASIC_AUTH_HEADER = `Basic ${Buffer.from(`${ENV.AMPLITUDE_API_KEY}:${ENV.AMPLITUDE_SECRET_KEY}`).toString('base64')}`;
+
+export async function getAmplitudeChart(chartId: string) {
+ logger.log('info', `getAmplitudeChart: ${chartId}`);
+ return await axiosAuth0
+ .get(`/chart/${chartId}/query`, {
+ headers: {
+ Authorization: BASIC_AUTH_HEADER,
+ },
+ })
+ .then((result) => result.data);
+}
diff --git a/apps/cron-tasks/src/utils/types.ts b/apps/cron-tasks/src/utils/types.ts
index 11dbbe0b5..363557c50 100644
--- a/apps/cron-tasks/src/utils/types.ts
+++ b/apps/cron-tasks/src/utils/types.ts
@@ -5,3 +5,50 @@ export interface DeleteResult {
orgCount: number | null;
localDatabaseId: string | null;
}
+
+export interface AmplitudeChartResult {
+ data: AmplitudeData;
+ timeComputed: number;
+ wasCached: boolean;
+ cacheFreshness: string;
+ novaRuntime: number;
+ novaRequestDuration: number;
+ novaCost: number;
+ throttleTime: number;
+ minSampleRate: number;
+ transformationIds: any[];
+ backend: string;
+ realtimeDataMissing: boolean;
+ timedOutRealtimeData: boolean;
+ missedCacheAndNotComputed: boolean;
+ partialMergedAndNewUserInformation: boolean;
+ prunedResult: boolean;
+ hitChunkGroupByLimit: boolean;
+ subcluster: number;
+ millisSinceComputed: number;
+ earliestServerReceivedTime: number;
+ queryIds: string[];
+}
+
+export interface AmplitudeData {
+ series: AmplitudeSeries[][];
+ seriesCollapsed: AmplitudeSeriesCollapsed[][];
+ seriesLabels: number[];
+ seriesMeta: AmplitudeSeriesMeta[];
+ xValues: string[];
+}
+
+export interface AmplitudeSeries {
+ setId: string;
+ value: number;
+}
+
+export interface AmplitudeSeriesCollapsed {
+ setId: string;
+ value: number;
+}
+
+export interface AmplitudeSeriesMeta {
+ segmentIndex: number;
+ eventIndex: number;
+}
diff --git a/apps/cron-tasks/webpack.config.js b/apps/cron-tasks/webpack.config.js
new file mode 100644
index 000000000..0be4f4f03
--- /dev/null
+++ b/apps/cron-tasks/webpack.config.js
@@ -0,0 +1,8 @@
+const { composePlugins, withNx } = require('@nx/webpack');
+
+// Nx plugins for webpack.
+module.exports = composePlugins(withNx(), (config) => {
+ // Note: This was added by an Nx migration. Webpack builds are required to have a corresponding Webpack config file.
+ // See: https://nx.dev/recipes/webpack/webpack-config-setup
+ return config;
+});
diff --git a/apps/docs/.eslintrc.json b/apps/docs/.eslintrc.json
new file mode 100644
index 000000000..a0f4cb714
--- /dev/null
+++ b/apps/docs/.eslintrc.json
@@ -0,0 +1,22 @@
+{
+ "extends": "../../.eslintrc.json",
+ "rules": {},
+ "ignorePatterns": ["!**/*"],
+ "overrides": [
+ {
+ "files": ["*.ts", "*.tsx", "*.js", "*.jsx"],
+ "parserOptions": {
+ "project": ["apps/docs/tsconfig.*?.json"]
+ },
+ "rules": {}
+ },
+ {
+ "files": ["*.ts", "*.tsx"],
+ "rules": {}
+ },
+ {
+ "files": ["*.js", "*.jsx"],
+ "rules": {}
+ }
+ ]
+}
diff --git a/apps/docs/.gitignore b/apps/docs/.gitignore
new file mode 100644
index 000000000..b2d6de306
--- /dev/null
+++ b/apps/docs/.gitignore
@@ -0,0 +1,20 @@
+# Dependencies
+/node_modules
+
+# Production
+/build
+
+# Generated files
+.docusaurus
+.cache-loader
+
+# Misc
+.DS_Store
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
diff --git a/apps/docs/README.md b/apps/docs/README.md
new file mode 100644
index 000000000..aaba2fa1e
--- /dev/null
+++ b/apps/docs/README.md
@@ -0,0 +1,41 @@
+# Website
+
+This website is built using [Docusaurus 2](https://docusaurus.io/), a modern static website generator.
+
+### Installation
+
+```
+$ yarn
+```
+
+### Local Development
+
+```
+$ yarn start
+```
+
+This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.
+
+### Build
+
+```
+$ yarn build
+```
+
+This command generates static content into the `build` directory and can be served using any static contents hosting service.
+
+### Deployment
+
+Using SSH:
+
+```
+$ USE_SSH=true yarn deploy
+```
+
+Not using SSH:
+
+```
+$ GIT_USER=<Your GitHub username> yarn deploy
+```
+
+If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.
diff --git a/apps/docs/docs/automation-control/automation-control.md b/apps/docs/docs/automation-control/automation-control.mdx
similarity index 97%
rename from apps/docs/docs/automation-control/automation-control.md
rename to apps/docs/docs/automation-control/automation-control.mdx
index 782e6c43e..05f5eb45d 100644
--- a/apps/docs/docs/automation-control/automation-control.md
+++ b/apps/docs/docs/automation-control/automation-control.mdx
@@ -90,7 +90,7 @@ Exporting will not include any pending changes you have made, it will always mat
### Export as Zip
-This option will download a metadata package based on the current state of all metadata in Salesforce. This is a full backup of all the metadata and will allow you to re-deploy the metadata at any point in the future using the [Deploy Metadata > Deploy and Compare Metadata](../deploy/deploy-metadata.md) page and choosing **Upload Metadata Zip**.
+This option will download a metadata package based on the current state of all metadata in Salesforce. This is a full backup of all the metadata and will allow you to re-deploy the metadata at any point in the future using the [Deploy Metadata > Deploy and Compare Metadata](../deploy/deploy-metadata.mdx) page and choosing **Upload Metadata Zip**.
#### Re-deploying changes
diff --git a/apps/docs/docs/deploy/create-object-form.png b/apps/docs/docs/deploy/create-object-form.png
new file mode 100644
index 000000000..f0621a772
Binary files /dev/null and b/apps/docs/docs/deploy/create-object-form.png differ
diff --git a/apps/docs/docs/deploy/create-object-permissions.png b/apps/docs/docs/deploy/create-object-permissions.png
new file mode 100644
index 000000000..da7c1aa84
Binary files /dev/null and b/apps/docs/docs/deploy/create-object-permissions.png differ
diff --git a/apps/docs/docs/deploy/create-object-results.png b/apps/docs/docs/deploy/create-object-results.png
new file mode 100644
index 000000000..ba4234ec6
Binary files /dev/null and b/apps/docs/docs/deploy/create-object-results.png differ
diff --git a/apps/docs/docs/deploy/deploy-fields.md b/apps/docs/docs/deploy/deploy-fields.mdx
similarity index 100%
rename from apps/docs/docs/deploy/deploy-fields.md
rename to apps/docs/docs/deploy/deploy-fields.mdx
diff --git a/apps/docs/docs/deploy/deploy-metadata.md b/apps/docs/docs/deploy/deploy-metadata.mdx
similarity index 100%
rename from apps/docs/docs/deploy/deploy-metadata.md
rename to apps/docs/docs/deploy/deploy-metadata.mdx
diff --git a/apps/docs/docs/deploy/deploy-object.mdx b/apps/docs/docs/deploy/deploy-object.mdx
new file mode 100644
index 000000000..f65ef073a
--- /dev/null
+++ b/apps/docs/docs/deploy/deploy-object.mdx
@@ -0,0 +1,48 @@
+---
+id: deploy-object
+title: Create Custom Object
+description: Jetstream makes it easy to quickly create a new Custom Object.
+keywords: [
+ salesforce,
+ salesforce admin,
+ salesforce developer,
+ salesforce automation,
+    salesforce workbench,
+ create custom object,
+ update custom object,
+ field level security,
+ page layout,
+ ]
+sidebar_label: Create Custom Object
+slug: /deploy-object
+---
+
+## Getting Started
+
+You can quickly deploy a new Custom Object in Jetstream from the **Create Object and Fields** page.
+
+Jetstream allows creating one object at a time, usually you would do this prior to creating new custom fields for the object.
+
+From the **Create Object and Fields** page, click the **Create New Object** button to open the modal to guide you through the process.
+
+## Configure Permissions
+
+Choose the profiles and permission sets you would like to assign permissions to, and choose which permissions you would like to apply.
+
+
+
+## Configure Object
+
+Configure your object. Jetstream will automatically populate the object plural label, API name, and Field Name fields for you when you modify the label to make the process simple. You can adjust the automatically populated values as needed.
+
+Refer to Salesforce documentation on specific details for configuring a new custom object.
+
+
+
+## Review Results
+
+Once you start the deployment, review the results on the **Results** tab.
+
+Once you close the modal, the list of objects will be refreshed and the newly created object will be available for selection to create new custom fields.
+
+
diff --git a/apps/docs/docs/deploy/formula-evaluator.md b/apps/docs/docs/deploy/formula-evaluator.mdx
similarity index 83%
rename from apps/docs/docs/deploy/formula-evaluator.md
rename to apps/docs/docs/deploy/formula-evaluator.mdx
index 6f1891d0f..6f3496250 100644
--- a/apps/docs/docs/deploy/formula-evaluator.md
+++ b/apps/docs/docs/deploy/formula-evaluator.mdx
@@ -26,6 +26,7 @@ the formula editor comes with some really nice features:
- Full-featured editor with syntax highlighting
- Auto-complete to help build your formulas
- Ability to run and test your formulas to see the outcome
+- Ability to test formulas as different users
:::note
@@ -36,12 +37,13 @@ Some formula functions are not yet supported and some complex formulas may not w
## Evaluating a formula
1. Choose which object you would like to work on
-2. Choose how numbers should be treated for fields with no value
-3. Choose if you want to write a formula from scratch or if you have an existing field you want to start from
+2. Choose which user to evaluate as, your user is the default selected user
+3. Choose how numbers should be treated for fields with no value
+4. Choose if you want to write a formula from scratch or if you have an existing field you want to start from
1. If you choose an existing field, the formula editor will be populated with the existing formula
-4. Write your formula
-5. Search and choose a record
-6. Click "Test Formula"
+5. Write your formula
+6. Search and choose a record
+7. Click "Test Formula"
### Editor
diff --git a/apps/docs/docs/developer/anonymous-apex.md b/apps/docs/docs/developer/anonymous-apex.mdx
similarity index 100%
rename from apps/docs/docs/developer/anonymous-apex.md
rename to apps/docs/docs/developer/anonymous-apex.mdx
diff --git a/apps/docs/docs/developer/debug-logs.md b/apps/docs/docs/developer/debug-logs.mdx
similarity index 100%
rename from apps/docs/docs/developer/debug-logs.md
rename to apps/docs/docs/developer/debug-logs.mdx
diff --git a/apps/docs/docs/developer/export-object-metadata.md b/apps/docs/docs/developer/export-object-metadata.mdx
similarity index 100%
rename from apps/docs/docs/developer/export-object-metadata.md
rename to apps/docs/docs/developer/export-object-metadata.mdx
diff --git a/apps/docs/docs/developer/platform-events.md b/apps/docs/docs/developer/platform-events.mdx
similarity index 100%
rename from apps/docs/docs/developer/platform-events.md
rename to apps/docs/docs/developer/platform-events.mdx
diff --git a/apps/docs/docs/developer/salesforce-api.md b/apps/docs/docs/developer/salesforce-api.mdx
similarity index 100%
rename from apps/docs/docs/developer/salesforce-api.md
rename to apps/docs/docs/developer/salesforce-api.mdx
diff --git a/apps/docs/docs/getting-started/_org-troubleshooting-table.mdx b/apps/docs/docs/getting-started/_org-troubleshooting-table.mdx
index ef1376ff9..818384364 100644
--- a/apps/docs/docs/getting-started/_org-troubleshooting-table.mdx
+++ b/apps/docs/docs/getting-started/_org-troubleshooting-table.mdx
@@ -9,7 +9,7 @@ export const OAuthSettingsList = ({ children }) => (
export const OAuthSettingsSolution = ({ children }) => (
- OAuth configuration settings can be managed in Setup -> Manage Connected Apps ->{' '}
+ OAuth configuration settings can be managed in Setup → Manage Connected Apps →{' '}
Find Jetstream and click Edit.
From here, you have the ability to:
@@ -30,13 +30,8 @@ export const OAuthSettingsSolution = ({ children }) => (
If you have issues adding your org, here are some likely causes and solutions.
-| Problem | Possible Causes | Solution |
-| ----------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
-| You are unable to login, your username and password is not accepted from Salesforce | Your org may have a login restriction to only allow access by logging in using the custom domain. | This setting can be found in Salesforce under `Setup` -> `My Domain` -> `Policies` -> `Prevent login from https://login.salesforce.com`.
If this is set to true, then you will want to use the **Custom Login URL** option and provide custom domain shown on the **Current My Domain URL** on the setup page. |
-| You recieve an error message after successfully logging in | | |
-
-:::important
-
-Jetstream uses a wide range of IP addresses, so you may need to relax IP address restrictions for the Jetstream Connected App.
-
-:::
+| Problem | Possible Causes | Solution |
+| --------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| You are unable to login, your username and password is not accepted from Salesforce | Your org may have a login restriction to only allow access by logging in using the custom domain. | This setting can be found in Salesforce under `Setup` → `My Domain` → `Policies` → `Prevent login from https://login.salesforce.com`.
If this is set to true, then you will want to use the **Custom Login URL** option and provide the custom domain shown as the **Current My Domain URL** on the setup page. |
+| You receive an error message after successfully logging in | | |
+| When you click a Salesforce link, you are required to "Choose a Verification Method" to continue to Salesforce. | You may have very strict session settings in Salesforce, such as "High Assurance". | By default, Jetstream uses [Frontdoor](https://help.salesforce.com/s/articleView?id=sf.security_frontdoorjsp.htm&type=5) to login using your existing Jetstream session.
This can be disabled by navigating to your settings and enabling the option to **Don't Auto-Login on Link Clicks**.