diff --git a/.env.sample b/.env.sample index c66ada9f85..6df99cfee9 100644 --- a/.env.sample +++ b/.env.sample @@ -7,18 +7,22 @@ DATABASE_NAME=metagame-db HASURA_GRAPHQL_ADMIN_SECRET=metagame_secret HASURA_PORT=8080 -# packages/web +# packages/web/.env NEXT_TELEMETRY_DISABLED=1 NEXT_PUBLIC_INFURA_ID=781d8466252d47508e177b8637b1c2fd NEXT_PUBLIC_GRAPHQL_URL=http://localhost:8080/v1/graphql # use https://api-staging.metagame.wtf/v1/graphql for staging backend NEXT_BRIGHTID_APP_URL=https://app.brightid.org NEXT_BRIGHTID_NODE_URL=http:%2f%2fnode.brightid.org NEXT_PUBLIC_VERCEL_URL=http://localhost:3000 +WEB3_STORAGE_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkaWQ6ZXRocjoweGE0OWIyMGIyMzVGY2E3N0QzRURlZWFDMzlDODkyZkVENmUzOTU5OTQiLCJpc3MiOiJ3ZWIzLXN0b3JhZ2UiLCJpYXQiOjE2MzA3MDMyMDg0ODEsIm5hbWUiOiJNeU1ldGEifQ.q7_i_XmkIMb6_6u9pNI6tkNUq0vhiJ0e1oV8GLdeez0 +NEXT_PUBLIC_CERAMIC_URL=https://ceramic-clay.3boxlabs.com +NEXT_PUBLIC_CERAMIC_NETWORK=testnet-clay # packages/discord-bot GRAPHQL_URL=http://localhost:8080/v1/graphql FRONTEND_URL=http://localhost:3000 + # packages/backend IMGIX_TOKEN= # GITHUB_API_TOKEN= # IMPORTANT! create one at https://github.com/settings/tokens diff --git a/.eslintignore b/.eslintignore new file mode 100644 index 0000000000..a9ba028cee --- /dev/null +++ b/.eslintignore @@ -0,0 +1 @@ +.eslintrc.js diff --git a/.eslintrc.js b/.eslintrc.js new file mode 100644 index 0000000000..61e1d54aef --- /dev/null +++ b/.eslintrc.js @@ -0,0 +1,83 @@ +module.exports = { + root: true, + extends: [ + 'airbnb-base', + 'airbnb-typescript/base', + 'airbnb/hooks', + 'plugin:@typescript-eslint/eslint-recommended', + 'plugin:@typescript-eslint/recommended', + 'plugin:jest/recommended', + 'prettier', + ], + parserOptions: { + project: 'tsconfig.json', + tsconfigRootDir: __dirname, + ecmaVersion: 6, + }, + ignorePatterns: ['packages/web/next.config.js'], + plugins: ['simple-import-sort'], + settings: { + 'import/resolver': { + typescript: {}, + }, + }, + rules: { + '@typescript-eslint/explicit-function-return-type': 'off', + + // Doesn't work for FC: https://github.com/yannickcr/eslint-plugin-react/issues/2353 + 'react/prop-types': 'off', + + // Prefer non-default exports + 'import/no-default-export': 'off', + 'import/prefer-default-export': 'off', + + // Auto-sort imports + 'sort-imports': 'off', + 'import/order': 'off', + 'simple-import-sort/imports': 'error', + 'simple-import-sort/exports': 'error', + + // unary operators are ok + 'no-plusplus': 'off', + + // Using a type system makes it safe enough to spread props + 'react/jsx-props-no-spreading': 'off', + + // we want to be able to use functions before definition + '@typescript-eslint/no-use-before-define': 'off', + + '@typescript-eslint/ban-ts-comment': [ + 'error', + { + 'ts-expect-error': 'allow-with-description', + minimumDescriptionLength: 5, + }, + ], + 'no-bitwise': 'off', + }, + overrides: [ + { + // assuming Next.js application + files: '**/pages/**/*.{ts,tsx}', + rules: { + 'react/react-in-jsx-scope': 'off', // react is a global in this folder + 'import/no-default-export': 'off', // pages have to have a default export + 'import/prefer-default-export': 'off', + '@typescript-eslint/explicit-module-boundary-types': [ + // So we can infer prop types + 'warn', + { allowedNames: ['getStaticProps'] }, + ], + }, + }, + { + files: ['**/*.stories.*'], + rules: { + // Storybook requires default exports for stories + 'import/no-default-export': 'off', + '@typescript-eslint/explicit-module-boundary-types': 'off', + }, + }, + ], + parser: 
'@typescript-eslint/parser', +}; diff --git a/.eslintrc.json b/.eslintrc.json deleted file mode 100644 index bedf47eb1d..0000000000 --- a/.eslintrc.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "root": true, - "extends": [ - "airbnb-base", - "airbnb-typescript/base", - "airbnb/hooks", - "plugin:@typescript-eslint/eslint-recommended", - "plugin:@typescript-eslint/recommended", - "plugin:jest/recommended", - "prettier" - ], - "parserOptions": { - "project": "./tsconfig.json", - "ecmaVersion": 6 - }, - "ignorePatterns": ["packages/web/next.config.js"], - "plugins": ["simple-import-sort"], - "settings": { - "import/resolver": { - "typescript": {} - } - }, - "rules": { - "@typescript-eslint/explicit-function-return-type": "off", - - // Doesn't work for FC: https://github.com/yannickcr/eslint-plugin-react/issues/2353 - "react/prop-types": "off", - - // Prefer non-default exports - "import/no-default-export": "off", - "import/prefer-default-export": "off", - - // Auto-sort imports - "sort-imports": "off", - "import/order": "off", - "simple-import-sort/imports": "error", - "simple-import-sort/exports": "error", - - // unary operators are ok - "no-plusplus": "off", - - // Using a type system makes it safe enough to spread props - "react/jsx-props-no-spreading": "off", - - // we want to be able to use functions before definition - "@typescript-eslint/no-use-before-define": "off", - - "@typescript-eslint/ban-ts-comment": [ - "error", - { - "ts-expect-error": "allow-with-description", - "minimumDescriptionLength": 5 - } - ], - "no-bitwise": "off" - }, - "ignorePatterns": ["next.config.js"], - "overrides": [ - { - // assuming Next.js application - "files": "**/pages/**/*.{ts,tsx}", - "rules": { - "react/react-in-jsx-scope": "off", // react is a global in this folder - "import/no-default-export": "off", // pages have to have a default export - "import/prefer-default-export": "off", - "@typescript-eslint/explicit-module-boundary-types": [ - // So we can infer prop types - "warn", - { "allowedNames": ["getStaticProps"] } - ] - } - }, - { - "files": ["**/*.stories.*"], - "rules": { - // Storybook requires default exports for stories - "import/no-default-export": "off", - "@typescript-eslint/explicit-module-boundary-types": "off" - } - } - ], - "parser": "@typescript-eslint/parser" -} diff --git a/.github/workflows/PR-CI.yml b/.github/workflows/PR-CI.yml index d7ee1cb4b4..033479c830 100644 --- a/.github/workflows/PR-CI.yml +++ b/.github/workflows/PR-CI.yml @@ -23,7 +23,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v1 with: - node-version: 14.x + node-version: 16.x - uses: actions/cache@v2 with: @@ -64,7 +64,7 @@ jobs: - uses: actions/setup-node@v1 with: - node-version: 14.x + node-version: 16.x - uses: actions/cache@v2 with: diff --git a/.github/workflows/gcp-deploy.yaml b/.github/workflows/gcp-deploy.yaml index 8a4331e4fd..e8eff028a5 100644 --- a/.github/workflows/gcp-deploy.yaml +++ b/.github/workflows/gcp-deploy.yaml @@ -2,20 +2,23 @@ name: Deploy Preview to Cloud Run on: pull_request: - branches: - - develop types: - opened - reopened + - synchronize env: PROJECT_ID: metagame-thegame REGISTRY_REGION: us-east4 REGISTRY_REPO: thegame - CLOUDRUN_REGION: us-east4 + DEPLOYMENT_DOMAIN: a.run.app CLOUDRUN_SUFFIX: mjhnbmqqna-uk - CLOUDSQL_INSTANCE_NAME: thegame - CLOUDSQL_CONNECTION_NAME: metagame-thegame:us-east4:thegame + DB_NAME: hasura-pr-${{github.event.number}} + BACKEND_SERVICE: backend-pr-${{github.event.number}} + FRONTEND_SERVICE: frontend-pr-${{github.event.number}} + BACKEND_PORT: 
4000 + HASURA_PORT: 8080 + FRONTEND_PORT: 3000 jobs: build-and-deploy: @@ -23,108 +26,179 @@ jobs: runs-on: ubuntu-latest steps: + - name: First Interpolation of Variables + run: | + echo "CLOUDRUN_REGION=${{env.REGISTRY_REGION}}" >> $GITHUB_ENV + echo "CLOUDSQL_INSTANCE_NAME=${{env.REGISTRY_REPO}}" >> $GITHUB_ENV + echo "CLOUDSQL_CONNECTION_NAME=${{env.PROJECT_ID}}:${{env.REGISTRY_REGION}}:${{env.REGISTRY_REPO}}" >> $GITHUB_ENV + echo "HASURA_SERVICE=${{env.DB_NAME}}" >> $GITHUB_ENV + echo "DOCKER_REGISTRY=${{env.REGISTRY_REGION}}-docker.pkg.dev" >> $GITHUB_ENV + echo "BACKEND_HOST=${{env.BACKEND_SERVICE}}-${{env.CLOUDRUN_SUFFIX}}.${{env.DEPLOYMENT_DOMAIN}}" >> $GITHUB_ENV + + - name: Those Variables May Now Be Interpolated + run: | + echo "HASURA_HOST=${{env.HASURA_SERVICE}}-${{env.CLOUDRUN_SUFFIX}}.${{env.DEPLOYMENT_DOMAIN}}" >> $GITHUB_ENV + echo "DOCKER_PATH=${{env.DOCKER_REGISTRY}}/${{env.PROJECT_ID}}/${{env.REGISTRY_REPO}}" >> $GITHUB_ENV + echo "SC_MIGRATE_URL=https://${{env.BACKEND_HOST}}/actions/migrateSourceCredAccounts?force=true" >> $GITHUB_ENV + echo "FRONTEND_URL=https://${{env.FRONTEND_SERVICE}}-${{env.CLOUDRUN_SUFFIX}}.${{env.DEPLOYMENT_DOMAIN}}" >> $GITHUB_ENV + + - name: And Again In Another Step + run: | + echo "GRAPHQL_URL=https://${{env.HASURA_HOST}}/v1/graphql" >> $GITHUB_ENV + echo "BACKEND_TAG=${{env.DOCKER_PATH}}/backend:pr-${{github.event.number}}" >> $GITHUB_ENV + echo "HASURA_TAG=${{env.DOCKER_PATH}}/hasura:pr-${{github.event.number}}" >> $GITHUB_ENV + echo "FRONTEND_TAG=${{env.DOCKER_PATH}}/frontend:pr-${{github.event.number}}" >> $GITHUB_ENV + echo "DB_PASSWORD=$(cat /dev/urandom | tr -cd [:alnum:] | head -c 16)" >> $GITHUB_ENV + - name: Checkout uses: actions/checkout@v2 + with: + ref: ${{github.event.pull_request.head.sha}} - name: Login to Registry uses: docker/login-action@v1 with: - registry: ${{ env.REGISTRY_REGION }}-docker.pkg.dev + registry: ${{env.DOCKER_REGISTRY}} username: _json_key - password: ${{ secrets.GCP_SA_KEY }} + password: ${{secrets.GCP_SA_KEY}} - name: Set up gcloud CLI uses: google-github-actions/setup-gcloud@v0.2.1 with: - project_id: ${{ env.PROJECT_ID }} - service_account_key: ${{ secrets.GCP_SA_KEY }} + project_id: ${{env.PROJECT_ID}} + service_account_key: ${{secrets.GCP_SA_KEY}} export_default_credentials: true + - name: Delete Database of Hasura + continue-on-error: true + run: | + wget -q https://dl.google.com/cloudsql/cloud_sql_proxy.linux.amd64 -O cloud_sql_proxy + chmod u+x cloud_sql_proxy + ./cloud_sql_proxy -instances ${{env.CLOUDSQL_CONNECTION_NAME}} -dir /tmp/cloudsql & + PID=$!
+ sleep 10 + PGPASSWORD=${{secrets.GCP_POSTGRES_PASSWORD}} dropdb -h /tmp/cloudsql/${{env.CLOUDSQL_CONNECTION_NAME}} -U postgres ${{env.DB_NAME}} -f + kill $PID + - name: Create User and Database for Hasura run: | - HASURA_DB_PASSWORD=$(cat /dev/urandom | tr -cd [:alnum:] | head -c 16) - echo "HASURA_DB_PASSWORD=${HASURA_DB_PASSWORD}" >> $GITHUB_ENV - gcloud -q sql users create hasura-pr-${{ github.event.number }} -i ${CLOUDSQL_INSTANCE_NAME} --password ${HASURA_DB_PASSWORD} - gcloud -q sql databases create hasura-pr-${{ github.event.number }} -i ${CLOUDSQL_INSTANCE_NAME} + gcloud -q sql users create ${{env.DB_NAME}} -i ${{env.CLOUDSQL_INSTANCE_NAME}} --password ${{env.DB_PASSWORD}} + gcloud -q sql databases create ${{env.DB_NAME}} -i ${{env.CLOUDSQL_INSTANCE_NAME}} + + - name: Undeploy Backend + continue-on-error: true + run: gcloud -q run services delete ${{env.BACKEND_SERVICE}} --region ${{env.CLOUDRUN_REGION}} + + - name: Delete Backend Image + continue-on-error: true + run: gcloud -q artifacts docker images delete ${{env.BACKEND_TAG}} - name: Build Backend Container uses: mattes/cached-docker-build-action@v1 with: - args: ". -f ./docker/backend/Dockerfile --tag ${REGISTRY_REGION}-docker.pkg.dev/${PROJECT_ID}/${REGISTRY_REPO}/backend:pr-${{ github.event.number }} --build-arg GRAPHQL_HOST=hasura-pr-${{ github.event.number }}-${CLOUDRUN_SUFFIX} --build-arg GRAPHQL_DOMAIN=a.run.app" - cache_key: "${{ hashFiles('**/lockfiles') }}" + args: ". -f ./docker/backend/Dockerfile --tag ${{env.BACKEND_TAG}} --build-arg GRAPHQL_URL=${{env.GRAPHQL_URL}}" + cache_key: "${{hashFiles('packages/backend/**')}}" - name: Push Backend Container - run: docker push ${REGISTRY_REGION}-docker.pkg.dev/${PROJECT_ID}/${REGISTRY_REPO}/backend:pr-${{ github.event.number }} + run: docker push ${{env.BACKEND_TAG}} - name: Deploy Backend run: | - gcloud -q run deploy backend-pr-${{ github.event.number }} \ - --image ${REGISTRY_REGION}-docker.pkg.dev/${PROJECT_ID}/${REGISTRY_REPO}/backend:pr-${{ github.event.number }} \ - --region ${CLOUDRUN_REGION} \ - --port 4000 \ + gcloud -q run deploy ${{env.BACKEND_SERVICE}} \ + --image ${{env.BACKEND_TAG}} \ + --region ${{env.CLOUDRUN_REGION}} \ + --port ${{env.BACKEND_PORT}} \ --cpu 1 \ --memory 512Mi \ --ingress all \ --allow-unauthenticated \ --max-instances 1 \ - --set-env-vars HASURA_GRAPHQL_ADMIN_SECRET=metagame_secret,GITHUB_API_TOKEN=${{ secrets.GH_API_TOKEN }},SOURCECRED_LEDGER_BRANCH=master + --set-env-vars HASURA_GRAPHQL_ADMIN_SECRET=metagame_secret \ + --set-env-vars GITHUB_API_TOKEN=${{secrets.GH_API_TOKEN}} \ + --set-env-vars SOURCECRED_LEDGER_BRANCH=master \ + --set-env-vars GRAPHQL_URL=${{env.GRAPHQL_URL}} + + - name: Undeploy Hasura + continue-on-error: true + run: gcloud -q run services delete ${{env.HASURA_SERVICE}} --region ${{env.CLOUDRUN_REGION}} + + - name: Delete Hasura Image + continue-on-error: true + run: gcloud -q artifacts docker images delete ${{env.HASURA_TAG}} - name: Build Hasura Container uses: mattes/cached-docker-build-action@v1 with: - args: "./hasura -f ./hasura/Dockerfile --tag ${REGISTRY_REGION}-docker.pkg.dev/${PROJECT_ID}/${REGISTRY_REPO}/hasura:pr-${{ github.event.number }} --build-arg BACKEND_HOST=backend-pr-${{ github.event.number }}-${CLOUDRUN_SUFFIX}.a.run.app --build-arg BACKEND_PROTOCOL=https" - cache_key: "${{ hashFiles('**/lockfiles') }}" + args: "./hasura -f ./hasura/Dockerfile --tag ${{env.HASURA_TAG}} --build-arg BACKEND_HOST=${{env.BACKEND_HOST}} --build-arg BACKEND_PROTOCOL=https" + cache_key: "${{hashFiles('hasura/**')}}" - 
name: Push Hasura Container - run: docker push ${REGISTRY_REGION}-docker.pkg.dev/${PROJECT_ID}/${REGISTRY_REPO}/hasura:pr-${{ github.event.number }} + run: docker push ${{env.HASURA_TAG}} - name: Deploy Hasura run: | - gcloud -q run deploy hasura-pr-${{ github.event.number }} \ - --image ${REGISTRY_REGION}-docker.pkg.dev/${PROJECT_ID}/${REGISTRY_REPO}/hasura:pr-${{ github.event.number }} \ - --region ${CLOUDRUN_REGION} \ - --port 8080 \ + gcloud -q run deploy ${{env.HASURA_SERVICE}} \ + --image ${{env.HASURA_TAG}} \ + --region ${{env.CLOUDRUN_REGION}} \ + --port ${{env.HASURA_PORT}} \ --cpu 1 \ --memory 512Mi \ --ingress all \ --allow-unauthenticated \ - --add-cloudsql-instances metagame-thegame:us-east4:thegame \ + --add-cloudsql-instances ${{env.CLOUDSQL_CONNECTION_NAME}} \ --max-instances 1 \ - --set-env-vars HASURA_GRAPHQL_DATABASE_URL=postgres://hasura-pr-${{ github.event.number }}:${HASURA_DB_PASSWORD}@/hasura-pr-${{ github.event.number }}?host=/cloudsql/${CLOUDSQL_CONNECTION_NAME},HASURA_GRAPHQL_ADMIN_SECRET=metagame_secret,HASURA_GRAPHQL_SERVER_PORT=8080,HASURA_GRAPHQL_ENABLE_CONSOLE=true + --set-env-vars HASURA_GRAPHQL_DATABASE_URL=postgres://${{env.DB_NAME}}:${{env.DB_PASSWORD}}@/${{env.DB_NAME}}?host=/cloudsql/${{env.CLOUDSQL_CONNECTION_NAME}} \ + --set-env-vars HASURA_GRAPHQL_ADMIN_SECRET=metagame_secret \ + --set-env-vars HASURA_GRAPHQL_SERVER_PORT=${{env.HASURA_PORT}} \ + --set-env-vars HASURA_GRAPHQL_ENABLE_CONSOLE=true + + - name: Undeploy Frontend + continue-on-error: true + run: gcloud -q run services delete ${{env.FRONTEND_SERVICE}} --region ${{env.CLOUDRUN_REGION}} + + - name: Delete Frontend Image + continue-on-error: true + run: gcloud -q artifacts docker images delete ${{env.FRONTEND_TAG}} - name: Build Frontend Container uses: mattes/cached-docker-build-action@v1 with: - args: ". -f ./docker/frontend/Dockerfile --tag ${REGISTRY_REGION}-docker.pkg.dev/${PROJECT_ID}/${REGISTRY_REPO}/frontend:pr-${{ github.event.number }} --build-arg GRAPHQL_HOST=hasura-pr-${{ github.event.number }}-${CLOUDRUN_SUFFIX} --build-arg GRAPHQL_DOMAIN=a.run.app" - cache_key: "${{ hashFiles('**/lockfiles') }}" + args: ". 
-f ./docker/frontend/Dockerfile --tag ${{env.FRONTEND_TAG}} --build-arg GRAPHQL_URL=${{env.GRAPHQL_URL}}" + cache_key: "${{hashFiles('packages/web/**', 'packages/design-system/**')}}" - name: Push Frontend Container - run: docker push ${REGISTRY_REGION}-docker.pkg.dev/${PROJECT_ID}/${REGISTRY_REPO}/frontend:pr-${{ github.event.number }} + run: docker push ${{env.FRONTEND_TAG}} - name: Deploy Frontend run: | - gcloud -q run deploy frontend-pr-${{ github.event.number }} \ - --image ${REGISTRY_REGION}-docker.pkg.dev/${PROJECT_ID}/${REGISTRY_REPO}/frontend:pr-${{ github.event.number }} \ - --region ${CLOUDRUN_REGION} \ - --port 3000 \ + gcloud -q run deploy ${{env.FRONTEND_SERVICE}} \ + --image ${{env.FRONTEND_TAG}} \ + --region ${{env.CLOUDRUN_REGION}} \ + --port ${{env.FRONTEND_PORT}} \ --cpu 1 \ --memory 512Mi \ --ingress all \ --max-instances 1 \ - --allow-unauthenticated + --allow-unauthenticated \ + --set-env-vars NEXT_PUBLIC_GRAPHQL_URL=${{env.GRAPHQL_URL}} \ + --set-env-vars WEB3_STORAGE_TOKEN=${{secrets.WEB3_STORAGE_TOKEN}} \ + --set-env-vars NEXT_PUBLIC_IMGIX_TOKEN=${{secrets.IMGIX_TOKEN}} - name: Seed Database run: | mv package.json package.json.temp - npm install --no-package-lock --no-save node-fetch@2.6.1 fake-tag@3.0.0 + npm install --no-package-lock --no-save node-fetch bottleneck mv package.json.temp package.json - LOCAL_GRAPHQL_URL="https://hasura-pr-${{ github.event.number }}-${CLOUDRUN_SUFFIX}.a.run.app/v1/graphql" LOCAL_BACKEND_ACCOUNT_MIGRATION_URL="https://backend-pr-${{ github.event.number }}-${CLOUDRUN_SUFFIX}.a.run.app/actions/migrateSourceCredAccounts?force=true" yarn hasura:seed-local-db + LOCAL_GRAPHQL_URL="${{env.GRAPHQL_URL}}" \ + LOCAL_BACKEND_ACCOUNT_MIGRATION_URL="${{env.SC_MIGRATE_URL}}" \ + yarn hasura:seed-local-db - name: Comment on Pull Request uses: thollander/actions-comment-pull-request@v1 with: message: | - Successfully deployed a Preview of this Pull Request - [Frontend](https://frontend-pr-${{ github.event.number }}-${{ env.CLOUDRUN_SUFFIX }}.a.run.app) - [Hasura](https://hasura-pr-${{ github.event.number }}-${{ env.CLOUDRUN_SUFFIX }}.a.run.app) - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + Successfully deployed a preview of this pull request: + + * [Frontend](${{env.FRONTEND_URL}}) + * [Hasura](//${{env.HASURA_HOST}}) + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} diff --git a/.gitignore b/.gitignore index c19ccba49e..83e241a52e 100644 --- a/.gitignore +++ b/.gitignore @@ -51,6 +51,8 @@ yarn-error.log* .vercel tmp/ +tsconfig.tsbuildinfo + # Ceramic daemon ipfs packages/backend/uploads/ diff --git a/.nvmrc b/.nvmrc index dd457fd1c1..53d838af21 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -12.20.2 +lts/gallium diff --git a/docker/backend/Dockerfile b/docker/backend/Dockerfile index 3f3e92c562..88cba279cb 100644 --- a/docker/backend/Dockerfile +++ b/docker/backend/Dockerfile @@ -1,4 +1,4 @@ -FROM node:14-slim as base +FROM node:16-slim as base WORKDIR /usr/src/app # Install dependencies not included in the slim image @@ -26,11 +26,13 @@ COPY packages/discord-bot ./packages/discord-bot/ COPY packages/@types ./packages/@types/ # Set env vars -ARG GRAPHQL_HOST +ARG GRAPHQL_HOST=hasura ARG GRAPHQL_DOMAIN=onrender.com +ARG GRAPHQL_URL=https://$GRAPHQL_HOST.$GRAPHQL_DOMAIN/v1/graphql -ENV GRAPHQL_URL https://$GRAPHQL_HOST.$GRAPHQL_DOMAIN/v1/graphql +ENV GRAPHQL_URL $GRAPHQL_URL ENV HASURA_GRAPHQL_ADMIN_SECRET metagame_secret +ENV CERAMIC_URL https://ceramic.metagame.wtf # Build RUN yarn backend:build @@ -39,7 +41,7 @@ RUN yarn backend:build RUN yarn install 
--pure-lockfile --production --ignore-scripts --prefer-offline # Create completely new stage including only necessary files -FROM node:14-alpine as app +FROM node:16-alpine as app WORKDIR /app # Copy necessary files into the stage diff --git a/docker/discord-bot/Dockerfile b/docker/discord-bot/Dockerfile index 54f077735d..b108190406 100644 --- a/docker/discord-bot/Dockerfile +++ b/docker/discord-bot/Dockerfile @@ -1,4 +1,4 @@ -FROM node:12-slim as base +FROM node:16-slim as base WORKDIR /usr/src/app # Install dependencies not included in the slim image @@ -36,7 +36,7 @@ RUN yarn discord-bot build RUN yarn install --pure-lockfile --production --ignore-scripts --prefer-offline # Create completely new stage including only necessary files -FROM node:12-alpine as app +FROM node:16-alpine as app WORKDIR /app # Needed at runtime diff --git a/docker/frontend/Dockerfile b/docker/frontend/Dockerfile index 694e4cbc7f..30420619c3 100644 --- a/docker/frontend/Dockerfile +++ b/docker/frontend/Dockerfile @@ -1,4 +1,4 @@ -FROM node:14-slim as base +FROM node:16-slim as base WORKDIR /usr/src/app # Install dependencies not included in the slim image @@ -30,10 +30,11 @@ COPY packages/design-system ./packages/design-system/ COPY packages/@types ./packages/@types/ # Set env vars -ARG GRAPHQL_HOST +ARG GRAPHQL_HOST=hasura ARG GRAPHQL_DOMAIN=onrender.com +ARG GRAPHQL_URL=https://$GRAPHQL_HOST.$GRAPHQL_DOMAIN/v1/graphql -ENV NEXT_PUBLIC_GRAPHQL_URL https://$GRAPHQL_HOST.$GRAPHQL_DOMAIN/v1/graphql +ENV NEXT_PUBLIC_GRAPHQL_URL $GRAPHQL_URL # Build RUN yarn web:build @@ -42,7 +43,7 @@ RUN yarn web:build RUN yarn install --pure-lockfile --production --ignore-scripts --prefer-offline # Create completely new stage including only necessary files -FROM node:14-alpine as app +FROM node:16-alpine as app WORKDIR /app # Copy necessary files into the stage diff --git a/guides/FRONTEND.md b/guides/FRONTEND.md index d52933670d..7d26c490cc 100644 --- a/guides/FRONTEND.md +++ b/guides/FRONTEND.md @@ -4,12 +4,11 @@ In order to develop MetaGame FrontEnd components, you need. -Node.js (v14 works, and later versions have some issues) +- Node.js (the current setup is using lts/gallium (16.13.1)) -- Yarn (1.15.2 as some later versions throw errors) - to set a version for yarn eg: `yarn policies set-version 1.15.2` +- Yarn _(v1.19.0 as some later versions throw errors when adding dependencies)_ _(to set yarn's version: `yarn policies set-version 1.19.0`)_ -You can either choose to only develop the frontend and use our deployed backend services, or run the services on your own machines. If you're about to only work on UI, it's much quicker to connect to the remote servers. +You can either choose to only develop the frontend and use our deployed backend services, or run the services on your own machines. If you're only going to work on UI, it's much quicker to connect to the remote servers. ### Install dependencies @@ -19,12 +18,16 @@ yarn ### Connecting to remote servers -To connect to staging servers, you'll need to create a file `.env.local` under `packages/web` and write this inside: +To connect to staging servers, you'll need to create a file `.env` under `packages/web/` and write this inside: -``` +```bash NEXT_PUBLIC_GRAPHQL_URL=https://api-staging.metagame.wtf/v1/graphql ``` +### Enabling profile image editing + +The storage of images associated with a profile is in [web3.storage](//web3.storage). In order to be able to connect, you will need to create an API token and save it in `packages/web/.env` as `WEB3_STORAGE_TOKEN`. 
+ ### Connecting to local server If you want to connect to locally running backend services, you will need to be able to start the backend with: @@ -33,6 +36,12 @@ If you want to connect to locally running backend services, you will need to be yarn docker:start ``` +Or, if you prefer to be able to see the output from the backend: + +```bash +docker-compose up --build +``` + If you have difficulties running the backend, checkout the [Backend Guide](BACKEND.md). ## Starting web app in dev mode @@ -43,18 +52,18 @@ Once you are ready to connect to the backend, you can start the web app by using yarn web:dev ``` -Go to [http://localhost:3000](http://localhost:3000) +Go to [localhost:3000](http://localhost:3000). Happy Coding! ## Creating GraphQL queries -In both the `web` and `metamaps` folder, all GraphQL queries are hosted in a folder called `graphql`. +The GraphQL queries are created by running, first, `yarn update-schema` which exports a GraphQL schema from Hasura, then, `yarn generate` which combines that schema with additional programmer-provided queries and mutations. -``` -web/graphql -metamaps/graphql -``` +The programmer contributions can be found at: + +- `packages/web/graphql/` +- `packages/backend/src/handlers/graphql/` Related queries and mutations should be in the same file. @@ -63,9 +72,7 @@ Related queries and mutations should be in the same file. Queries are to pull and retrieve data from the database. In the following example we will be using the `Map` table as an example. ```typescript -import gql from 'fake-tag'; - -export const GetMapQuery = gql` +export const GetMapQuery = /* GraphQL */ ` query GetMap($id: uuid!) { Map_by_pk(id: $id) { id @@ -83,7 +90,7 @@ In the above example we retrieved a map by id. There are several key things you ```graphql query GetMap($id: uuid!) { - ... + ⋮ ``` We provided a `$id`. It is important that all queries and mutations have parameters that are prefixed with a `$`. @@ -107,7 +114,7 @@ Specifying these columns are important depending on what data you need to pull t Map_by_pk(id: $id) ``` -You can actually review all queries and mutations available in Hasura on `http://localhost:9695`. +You can actually review all queries and mutations available in Hasura on `http://localhost:9695` by running `yarn hasura console`. ### Integrating the GraphQL query @@ -175,9 +182,7 @@ Keep in mind the following parts of the query state: GraphQL mutations are used to update and insert new data into the database. The following is an example of inserting data into the `Map` table. ```typescript -import gql from 'fake-tag'; - -export const CreateMap = gql` +export const CreateMap = /* GraphQL */ ` mutation createMap($author: String!, $name: String!) 
{ insert_Map_one(object: { author_address: $author, name: $name, data: "" }) { id diff --git a/hasura/Dockerfile b/hasura/Dockerfile index 843c2a6fea..e3c8b8579c 100644 --- a/hasura/Dockerfile +++ b/hasura/Dockerfile @@ -5,12 +5,13 @@ RUN chmod +x /wait ## Default setup -ARG BACKEND_HOST +ARG BACKEND_HOST=localhost ARG BACKEND_PROTOCOL=http ARG AUTH_HOOK_PATH=auth-webhook ARG ACTION_BASE_PATH=actions ARG REMOTE_SCHEMA_PATH=remote-schemas/graphql ARG TRIGGERS_PATH=triggers +ARG HASURA_GRAPHQL_SERVER_PORT=8080 ENV HASURA_GRAPHQL_DEV_MODE false ENV HASURA_GRAPHQL_ENABLE_TELEMETRY false diff --git a/hasura/metadata/actions.graphql b/hasura/metadata/actions.graphql index 8d04d7c475..f53112eb67 100644 --- a/hasura/metadata/actions.graphql +++ b/hasura/metadata/actions.graphql @@ -67,23 +67,23 @@ enum GuildType_ActionEnum { } input CreateQuestInput { - guild_id : uuid! + guildId : uuid! title : String! description : String - external_link : String + externalLink : String repetition : QuestRepetition_ActionEnum cooldown : Int - skills_id : [uuid]! + skillsId : [uuid]! } input CreateQuestCompletionInput { - quest_id : String! - submission_link : String - submission_text : String + questId : String! + submissionLink : String + submissionText : String } input UpdateQuestCompletionInput { - quest_completion_id : String! + questCompletionId : String! status : QuestCompletionStatus_ActionEnum! } @@ -93,21 +93,25 @@ input GuildInfo { name : String! type : GuildType_ActionEnum! description : String - discordInviteUrl : String - joinUrl : String - logoUrl : String - websiteUrl : String - twitterUrl : String - githubUrl : String + discordInviteURL : String + joinURL : String + logoURL : String + websiteURL : String + twitterURL : String + githubURL : String daoAddress : String membershipThroughDiscord : Boolean discordAdminRoles : [String]! discordMembershipRoles : [String]! } -type UpdateBoxProfileResponse { +type UpdateIDXProfileResponse { success : Boolean! - updatedProfiles : [String!]! + ceramic : String! + did : String + ethereumAddress : String + accountLinks : [String] + fields : [String] } type CreateQuestOutput { @@ -137,14 +141,10 @@ type DiscordGuildAuthResponse { type CacheProcessOutput { success : Boolean! + queued : Boolean! error : String } -type UpdateIDXProfileResponse { - success : Boolean! - updatedProfiles : [String!]! -} - type ExpiredPlayerProfiles { ids : [String]! 
} diff --git a/hasura/metadata/actions.yaml b/hasura/metadata/actions.yaml index 8e84ea89e2..d67b3ed112 100644 --- a/hasura/metadata/actions.yaml +++ b/hasura/metadata/actions.yaml @@ -39,6 +39,7 @@ actions: forward_client_headers: true permissions: - role: player + - role: public - name: updateQuestCompletion definition: kind: synchronous @@ -90,7 +91,7 @@ custom_types: - name: UpdateQuestCompletionInput - name: GuildInfo objects: - - name: UpdateBoxProfileResponse + - name: UpdateIDXProfileResponse - name: CreateQuestOutput relationships: - remote_table: @@ -120,7 +121,6 @@ custom_types: quest_completion_id: id - name: DiscordGuildAuthResponse - name: CacheProcessOutput - - name: UpdateIDXProfileResponse - name: ExpiredPlayerProfiles - name: SaveGuildResponse scalars: [] diff --git a/hasura/metadata/tables.yaml b/hasura/metadata/tables.yaml index e1f8544669..8882fca58d 100644 --- a/hasura/metadata/tables.yaml +++ b/hasura/metadata/tables.yaml @@ -6,13 +6,13 @@ schema: public name: ColorAspect array_relationships: - - name: players + - name: profiles using: foreign_key_constraint_on: column: color_mask table: schema: public - name: player + name: profile select_permissions: - role: player permission: @@ -28,6 +28,38 @@ - description - name filter: {} +- table: + schema: public + name: ExplorerType + configuration: + custom_root_fields: {} + custom_column_names: + image_url: imageURL + array_relationships: + - name: profiles + using: + foreign_key_constraint_on: + column: explorer_type_title + table: + schema: public + name: profile + select_permissions: + - role: player + permission: + columns: + - description + - id + - image_url + - title + filter: {} + - role: public + permission: + columns: + - id + - description + - image_url + - title + filter: {} - table: schema: public name: GuildPosition @@ -275,6 +307,10 @@ - table: schema: public name: me + configuration: + custom_root_fields: {} + custom_column_names: + ethereum_address: ethereumAddress object_relationships: - name: player using: @@ -297,21 +333,25 @@ - table: schema: public name: player + configuration: + custom_root_fields: {} + custom_column_names: + ethereum_address: ethereumAddress + discord_id: discordId + updated_at: updatedAt + season_xp: seasonXP + created_at: createdAt + total_xp: totalXP + profile_layout: profileLayout object_relationships: - - name: color_aspect - using: - foreign_key_constraint_on: color_mask - - name: profile_cache + - name: profile using: manual_configuration: remote_table: schema: public - name: profile_cache + name: profile column_mapping: id: player_id - - name: type - using: - foreign_key_constraint_on: player_type_id array_relationships: - name: accounts using: @@ -356,15 +396,6 @@ schema: public name: player_skill remote_relationships: - - definition: - remote_field: - getBoxProfile: - arguments: - address: $ethereum_address - hasura_fields: - - ethereum_address - remote_schema: backend - name: box_profile - definition: remote_field: getDaoHausMemberships: @@ -396,50 +427,33 @@ - role: player permission: columns: - - availability_hours - - color_mask - ethereum_address - id - - player_type_id - profile_layout - - pronouns - rank - role - season_xp - - timezone - total_xp - - username filter: {} allow_aggregations: true - role: public permission: columns: - - availability_hours - - color_mask - ethereum_address - id - profile_layout - - pronouns - rank - role - season_xp - - timezone - total_xp - - username filter: {} allow_aggregations: true update_permissions: - role: player permission: 
columns: - - availability_hours - - color_mask - - player_type_id - profile_layout - - pronouns - role - - timezone - - username filter: id: _eq: X-Hasura-User-Id @@ -474,6 +488,10 @@ - table: schema: public name: player_account + configuration: + custom_root_fields: {} + custom_column_names: + player_id: playerId object_relationships: - name: Player using: @@ -596,34 +614,27 @@ _eq: X-Hasura-User-Id - table: schema: public - name: player_type - select_permissions: - - role: player - permission: - columns: - - description - - id - - imageUrl - - title - filter: {} - - role: public - permission: - columns: - - id - - description - - imageUrl - - title - filter: {} -- table: - schema: public - name: profile_cache + name: profile configuration: custom_root_fields: {} custom_column_names: + profile_image_url: profileImageURL + country_code: countryCode background_image_url: backgroundImageURL - image_url: imageURL + last_checked_at: lastCheckedAt + banner_image_url: bannerImageURL + color_mask: colorMask player_id: playerId + time_zone: timeZone + available_hours: availableHours + explorer_type_title: explorerTypeTitle object_relationships: + - name: colorAspect + using: + foreign_key_constraint_on: color_mask + - name: explorerType + using: + foreign_key_constraint_on: explorer_type_title - name: player using: foreign_key_constraint_on: player_id @@ -631,42 +642,65 @@ - role: player permission: columns: - - name + - available_hours + - background_image_url + - banner_image_url + - color_mask + - country_code - description - - location - emoji - - image_url - - background_image_url - - website + - explorer_type_title - gender - id + - location + - name - player_id + - profile_image_url + - pronouns + - time_zone + - username + - website filter: {} - role: public permission: columns: + - available_hours - background_image_url + - banner_image_url + - color_mask + - country_code - description - emoji + - explorer_type_title - gender - - image_url + - id - location - name - - website - - id - player_id + - profile_image_url + - pronouns + - time_zone + - username + - website filter: {} update_permissions: - role: player permission: columns: + - available_hours - background_image_url + - banner_image_url + - color_mask + - country_code - description - emoji - gender - - image_url - location - name + - profile_image_url + - pronouns + - time_zone + - username - website filter: player_id: @@ -675,6 +709,13 @@ - table: schema: public name: quest + configuration: + custom_root_fields: {} + custom_column_names: + external_link: externalLink + created_by_player_id: createdByPlayerId + created_at: createdAt + guild_id: guildId object_relationships: - name: guild using: @@ -763,6 +804,14 @@ - table: schema: public name: quest_completion + configuration: + custom_root_fields: {} + custom_column_names: + quest_id: questId + submission_link: submissionLink + submission_text: submissionText + completed_by_player_id: completedByPlayerId + submitted_at: submittedAt object_relationships: - name: player using: @@ -812,6 +861,11 @@ - table: schema: public name: quest_skill + configuration: + custom_root_fields: {} + custom_column_names: + quest_id: questId + skill_id: skillId object_relationships: - name: quest using: diff --git a/hasura/migrations/1597557008749_sourcecred-migration/up.sql b/hasura/migrations/1597557008749_sourcecred-migration/up.sql index 368de749d5..88c53e4f5f 100644 --- a/hasura/migrations/1597557008749_sourcecred-migration/up.sql +++ 
b/hasura/migrations/1597557008749_sourcecred-migration/up.sql @@ -1,7 +1,17 @@ -ALTER TABLE "public"."Player" ALTER COLUMN "ethereum_address" DROP NOT NULL; +ALTER TABLE "public"."Player" + ALTER COLUMN "ethereum_address" + DROP NOT NULL +; -ALTER TABLE "public"."Player" ADD COLUMN "scIdentityId" Text NULL UNIQUE; +ALTER TABLE "public"."Player" + ADD COLUMN "scIdentityId" + Text NULL UNIQUE +; -alter table "public"."Account" drop constraint "Account_identifier_unique_key"; -alter table "public"."Account" add constraint "Account_identifier_type_key" unique ("identifier", "type"); +ALTER TABLE "public"."Account" + DROP CONSTRAINT "Account_identifier_unique_key"; +ALTER TABLE "public"."Account" + ADD CONSTRAINT "Account_identifier_type_key" + UNIQUE ("identifier", "type") +; diff --git a/hasura/migrations/1638233855361_set_fk_public_player_account_player_id/down.sql b/hasura/migrations/1638233855361_set_fk_public_player_account_player_id/down.sql new file mode 100644 index 0000000000..77faeece34 --- /dev/null +++ b/hasura/migrations/1638233855361_set_fk_public_player_account_player_id/down.sql @@ -0,0 +1,8 @@ +ALTER TABLE public.player_account + DROP CONSTRAINT player_account_player_id_fkey, + ADD CONSTRAINT player_account_player_id_fkey + FOREIGN KEY (player_id) + REFERENCES public.player(id) + ON UPDATE NO ACTION + ON DELETE CASCADE +; diff --git a/hasura/migrations/1638233855361_set_fk_public_player_account_player_id/up.sql b/hasura/migrations/1638233855361_set_fk_public_player_account_player_id/up.sql new file mode 100644 index 0000000000..760dfe5940 --- /dev/null +++ b/hasura/migrations/1638233855361_set_fk_public_player_account_player_id/up.sql @@ -0,0 +1,7 @@ +ALTER TABLE public.player_account + DROP CONSTRAINT player_account_player_id_fkey, + ADD CONSTRAINT player_account_player_id_fkey + FOREIGN KEY (player_id) + REFERENCES public.player(id) + ON UPDATE NO ACTION ON DELETE NO ACTION +; diff --git a/hasura/migrations/1638256399268_alter_table_public_player_alter_column_availability_hours/down.sql b/hasura/migrations/1638256399268_alter_table_public_player_alter_column_availability_hours/down.sql new file mode 100644 index 0000000000..7f26a00e40 --- /dev/null +++ b/hasura/migrations/1638256399268_alter_table_public_player_alter_column_availability_hours/down.sql @@ -0,0 +1 @@ +alter table "public"."player" rename column "available_hours" to "availability_hours"; diff --git a/hasura/migrations/1638256399268_alter_table_public_player_alter_column_availability_hours/up.sql b/hasura/migrations/1638256399268_alter_table_public_player_alter_column_availability_hours/up.sql new file mode 100644 index 0000000000..50ca1ace85 --- /dev/null +++ b/hasura/migrations/1638256399268_alter_table_public_player_alter_column_availability_hours/up.sql @@ -0,0 +1 @@ +alter table "public"."player" rename column "availability_hours" to "available_hours"; diff --git a/hasura/migrations/1638333712988_alter_table_public_profile_cache_alter_column_country/down.sql b/hasura/migrations/1638333712988_alter_table_public_profile_cache_alter_column_country/down.sql new file mode 100644 index 0000000000..72d1aa374a --- /dev/null +++ b/hasura/migrations/1638333712988_alter_table_public_profile_cache_alter_column_country/down.sql @@ -0,0 +1 @@ +alter table "public"."profile_cache" rename column "country_code" to "country"; diff --git a/hasura/migrations/1638333712988_alter_table_public_profile_cache_alter_column_country/up.sql b/hasura/migrations/1638333712988_alter_table_public_profile_cache_alter_column_country/up.sql 
new file mode 100644 index 0000000000..2e81f16dd7 --- /dev/null +++ b/hasura/migrations/1638333712988_alter_table_public_profile_cache_alter_column_country/up.sql @@ -0,0 +1 @@ +alter table "public"."profile_cache" rename column "country" to "country_code"; diff --git a/hasura/migrations/1639455871385_rename_table_public_profile_cache/down.sql b/hasura/migrations/1639455871385_rename_table_public_profile_cache/down.sql new file mode 100644 index 0000000000..fa091d30a5 --- /dev/null +++ b/hasura/migrations/1639455871385_rename_table_public_profile_cache/down.sql @@ -0,0 +1 @@ +alter table "public"."profile" rename to "profile_cache"; diff --git a/hasura/migrations/1639455871385_rename_table_public_profile_cache/up.sql b/hasura/migrations/1639455871385_rename_table_public_profile_cache/up.sql new file mode 100644 index 0000000000..887196c0a3 --- /dev/null +++ b/hasura/migrations/1639455871385_rename_table_public_profile_cache/up.sql @@ -0,0 +1 @@ +alter table "public"."profile_cache" rename to "profile"; diff --git a/hasura/migrations/1639455903307_alter_table_public_profile_alter_column_background_image_url/down.sql b/hasura/migrations/1639455903307_alter_table_public_profile_alter_column_background_image_url/down.sql new file mode 100644 index 0000000000..01f921bdfa --- /dev/null +++ b/hasura/migrations/1639455903307_alter_table_public_profile_alter_column_background_image_url/down.sql @@ -0,0 +1 @@ +alter table "public"."profile" rename column "banner_image_url" to "background_image_url"; diff --git a/hasura/migrations/1639455903307_alter_table_public_profile_alter_column_background_image_url/up.sql b/hasura/migrations/1639455903307_alter_table_public_profile_alter_column_background_image_url/up.sql new file mode 100644 index 0000000000..22628b66d8 --- /dev/null +++ b/hasura/migrations/1639455903307_alter_table_public_profile_alter_column_background_image_url/up.sql @@ -0,0 +1 @@ +alter table "public"."profile" rename column "background_image_url" to "banner_image_url"; diff --git a/hasura/migrations/1639457137925_alter_table_public_profile_add_column_pronouns/down.sql b/hasura/migrations/1639457137925_alter_table_public_profile_add_column_pronouns/down.sql new file mode 100644 index 0000000000..9348a51af5 --- /dev/null +++ b/hasura/migrations/1639457137925_alter_table_public_profile_add_column_pronouns/down.sql @@ -0,0 +1,14 @@ +ALTER TABLE public.player + ADD COLUMN IF NOT EXISTS pronouns text NULL +; + +UPDATE player + SET pronouns = profile.pronouns + FROM profile + WHERE profile.player_id = player.id + AND player.pronouns IS NULL +; + +ALTER TABLE public.profile + DROP COLUMN pronouns +; diff --git a/hasura/migrations/1639457137925_alter_table_public_profile_add_column_pronouns/up.sql b/hasura/migrations/1639457137925_alter_table_public_profile_add_column_pronouns/up.sql new file mode 100644 index 0000000000..6b9c9df4a4 --- /dev/null +++ b/hasura/migrations/1639457137925_alter_table_public_profile_add_column_pronouns/up.sql @@ -0,0 +1,11 @@ +ALTER TABLE public.profile + ADD COLUMN IF NOT EXISTS pronouns text NULL +; + +-- A “pronouns” field was previously added to the root player table +UPDATE profile + SET pronouns = player.pronouns + FROM player + WHERE profile.player_id = player.id + AND profile.pronouns IS NULL +; diff --git a/hasura/migrations/1639457861149_alter_table_public_profile_add_column_background_image_url/down.sql b/hasura/migrations/1639457861149_alter_table_public_profile_add_column_background_image_url/down.sql new file mode 100644 index 0000000000..2ffebae4e9 --- 
/dev/null +++ b/hasura/migrations/1639457861149_alter_table_public_profile_add_column_background_image_url/down.sql @@ -0,0 +1 @@ +ALTER TABLE "public"."profile" DROP COLUMN "background_image_url"; diff --git a/hasura/migrations/1639457861149_alter_table_public_profile_add_column_background_image_url/up.sql b/hasura/migrations/1639457861149_alter_table_public_profile_add_column_background_image_url/up.sql new file mode 100644 index 0000000000..0aa055d9f6 --- /dev/null +++ b/hasura/migrations/1639457861149_alter_table_public_profile_add_column_background_image_url/up.sql @@ -0,0 +1 @@ +ALTER TABLE "public"."profile" ADD COLUMN "background_image_url" text NULL; diff --git a/hasura/migrations/1639458378073_set_fk_public_profile_player_id/down.sql b/hasura/migrations/1639458378073_set_fk_public_profile_player_id/down.sql new file mode 100644 index 0000000000..82a49202fb --- /dev/null +++ b/hasura/migrations/1639458378073_set_fk_public_profile_player_id/down.sql @@ -0,0 +1,7 @@ +alter table "public"."profile" drop constraint "profile_player_id_fkey", + add constraint "profile_cache_player_id_fkey" + foreign key ("player_id") + references "public"."player" + ("id") + on update cascade + on delete cascade; diff --git a/hasura/migrations/1639458378073_set_fk_public_profile_player_id/up.sql b/hasura/migrations/1639458378073_set_fk_public_profile_player_id/up.sql new file mode 100644 index 0000000000..03ad132d28 --- /dev/null +++ b/hasura/migrations/1639458378073_set_fk_public_profile_player_id/up.sql @@ -0,0 +1,5 @@ +alter table "public"."profile" drop constraint "profile_cache_player_id_fkey", + add constraint "profile_player_id_fkey" + foreign key ("player_id") + references "public"."player" + ("id") on update cascade on delete cascade; diff --git a/hasura/migrations/1639458403546_alter_table_public_profile_add_unique_player_id/down.sql b/hasura/migrations/1639458403546_alter_table_public_profile_add_unique_player_id/down.sql new file mode 100644 index 0000000000..8b9e5b00a0 --- /dev/null +++ b/hasura/migrations/1639458403546_alter_table_public_profile_add_unique_player_id/down.sql @@ -0,0 +1,2 @@ +alter table "public"."profile" drop constraint "profile_player_id_key"; +alter table "public"."profile" add constraint "profile_player_id_key" unique ("player_id"); diff --git a/hasura/migrations/1639458403546_alter_table_public_profile_add_unique_player_id/up.sql b/hasura/migrations/1639458403546_alter_table_public_profile_add_unique_player_id/up.sql new file mode 100644 index 0000000000..e7083eba9f --- /dev/null +++ b/hasura/migrations/1639458403546_alter_table_public_profile_add_unique_player_id/up.sql @@ -0,0 +1,2 @@ +alter table "public"."profile" drop constraint "profile_cache_player_id_key"; +alter table "public"."profile" add constraint "profile_player_id_key" unique ("player_id"); diff --git a/hasura/migrations/1639458853392_alter_table_public_profile_add_column_username/down.sql b/hasura/migrations/1639458853392_alter_table_public_profile_add_column_username/down.sql new file mode 100644 index 0000000000..ea4f76375f --- /dev/null +++ b/hasura/migrations/1639458853392_alter_table_public_profile_add_column_username/down.sql @@ -0,0 +1,10 @@ +UPDATE player + SET username = profile.username + FROM profile + WHERE profile.player_id = player.id + AND player.username IS NULL +; + +ALTER TABLE public.profile + DROP COLUMN username +; diff --git a/hasura/migrations/1639458853392_alter_table_public_profile_add_column_username/up.sql 
b/hasura/migrations/1639458853392_alter_table_public_profile_add_column_username/up.sql new file mode 100644 index 0000000000..980972c30c --- /dev/null +++ b/hasura/migrations/1639458853392_alter_table_public_profile_add_column_username/up.sql @@ -0,0 +1,11 @@ +ALTER TABLE public.profile + ADD COLUMN IF NOT EXISTS username text NULL UNIQUE +; + +-- A “username” field was previously added to the root player table +UPDATE profile + SET username = player.username + FROM player + WHERE profile.player_id = player.id + AND profile.username IS NULL +; diff --git a/hasura/migrations/1639476052965_alter_table_public_player_drop_column_pronouns/down.sql b/hasura/migrations/1639476052965_alter_table_public_player_drop_column_pronouns/down.sql new file mode 100644 index 0000000000..2e0fe884b3 --- /dev/null +++ b/hasura/migrations/1639476052965_alter_table_public_player_drop_column_pronouns/down.sql @@ -0,0 +1,2 @@ +ALTER TABLE "public"."player" ADD COLUMN "pronouns" text; +ALTER TABLE "public"."player" ALTER COLUMN "pronouns" DROP NOT NULL; diff --git a/hasura/migrations/1639476052965_alter_table_public_player_drop_column_pronouns/up.sql b/hasura/migrations/1639476052965_alter_table_public_player_drop_column_pronouns/up.sql new file mode 100644 index 0000000000..c6f2337d8f --- /dev/null +++ b/hasura/migrations/1639476052965_alter_table_public_player_drop_column_pronouns/up.sql @@ -0,0 +1 @@ +ALTER TABLE "public"."player" DROP COLUMN "pronouns" CASCADE; diff --git a/hasura/migrations/1639476452544_move_view_username_to_profile/up.sql b/hasura/migrations/1639476452544_move_view_username_to_profile/up.sql new file mode 100644 index 0000000000..aa8b185a74 --- /dev/null +++ b/hasura/migrations/1639476452544_move_view_username_to_profile/up.sql @@ -0,0 +1,7 @@ +CREATE OR REPLACE VIEW public.me AS + SELECT player.id, + profile.username, + player.ethereum_address + FROM player + JOIN profile + ON player.id = profile.player_id; diff --git a/hasura/migrations/1639476534860_alter_table_public_player_drop_column_username/down.sql b/hasura/migrations/1639476534860_alter_table_public_player_drop_column_username/down.sql new file mode 100644 index 0000000000..ca306c0bcc --- /dev/null +++ b/hasura/migrations/1639476534860_alter_table_public_player_drop_column_username/down.sql @@ -0,0 +1,3 @@ +ALTER TABLE "public"."player" ADD COLUMN "username" text; +ALTER TABLE "public"."player" ALTER COLUMN "username" DROP NOT NULL; +ALTER TABLE "public"."player" ADD CONSTRAINT Player_username_unique_key UNIQUE (username); diff --git a/hasura/migrations/1639476534860_alter_table_public_player_drop_column_username/up.sql b/hasura/migrations/1639476534860_alter_table_public_player_drop_column_username/up.sql new file mode 100644 index 0000000000..ddb98d00e5 --- /dev/null +++ b/hasura/migrations/1639476534860_alter_table_public_player_drop_column_username/up.sql @@ -0,0 +1 @@ +ALTER TABLE "public"."player" DROP COLUMN "username" CASCADE; diff --git a/hasura/migrations/1640057892300_alter_table_public_profile_alter_column_image_url/down.sql b/hasura/migrations/1640057892300_alter_table_public_profile_alter_column_image_url/down.sql new file mode 100644 index 0000000000..ffb0c84616 --- /dev/null +++ b/hasura/migrations/1640057892300_alter_table_public_profile_alter_column_image_url/down.sql @@ -0,0 +1 @@ +alter table "public"."profile" rename column "profile_image_url" to "image_url"; diff --git a/hasura/migrations/1640057892300_alter_table_public_profile_alter_column_image_url/up.sql 
b/hasura/migrations/1640057892300_alter_table_public_profile_alter_column_image_url/up.sql new file mode 100644 index 0000000000..05f5e4c76b --- /dev/null +++ b/hasura/migrations/1640057892300_alter_table_public_profile_alter_column_image_url/up.sql @@ -0,0 +1 @@ +alter table "public"."profile" rename column "image_url" to "profile_image_url"; diff --git a/hasura/migrations/1640078300535_move_avaialable_hours_column_to_profile/down.sql b/hasura/migrations/1640078300535_move_avaialable_hours_column_to_profile/down.sql new file mode 100644 index 0000000000..65871e4c16 --- /dev/null +++ b/hasura/migrations/1640078300535_move_avaialable_hours_column_to_profile/down.sql @@ -0,0 +1,15 @@ + +ALTER TABLE public.player + ADD COLUMN IF NOT EXISTS available_hours integer NULL +; + +UPDATE player + SET available_hours = profile.available_hours + FROM profile + WHERE profile.player_id = player.id + AND player.available_hours IS NULL +; + +ALTER TABLE public.profile + DROP COLUMN available_hours +; diff --git a/hasura/migrations/1640078300535_move_avaialable_hours_column_to_profile/up.sql b/hasura/migrations/1640078300535_move_avaialable_hours_column_to_profile/up.sql new file mode 100644 index 0000000000..fa92a56c64 --- /dev/null +++ b/hasura/migrations/1640078300535_move_avaialable_hours_column_to_profile/up.sql @@ -0,0 +1,19 @@ +ALTER TABLE public.profile + ADD COLUMN IF NOT EXISTS available_hours integer NULL +; + +-- needed once b/c down migration partially failed +ALTER TABLE public.player + ADD COLUMN IF NOT EXISTS available_hours integer NULL +; + +UPDATE profile + SET available_hours = player.available_hours + FROM player + WHERE profile.player_id = player.id + AND profile.available_hours IS NULL +; + +ALTER TABLE public.player + DROP COLUMN IF EXISTS available_hours CASCADE +; diff --git a/hasura/migrations/1640183267176_move_time_zone_to_profile_table/down.sql b/hasura/migrations/1640183267176_move_time_zone_to_profile_table/down.sql new file mode 100644 index 0000000000..bdd14fc0aa --- /dev/null +++ b/hasura/migrations/1640183267176_move_time_zone_to_profile_table/down.sql @@ -0,0 +1,15 @@ + +ALTER TABLE public.player + ADD COLUMN IF NOT EXISTS timezone integer NULL +; + +UPDATE player + SET timezone = profile.time_zone + FROM profile + WHERE profile.player_id = player.id + AND player.timezone IS NULL +; + +ALTER TABLE public.profile + DROP COLUMN IF EXISTS time_zone +; diff --git a/hasura/migrations/1640183267176_move_time_zone_to_profile_table/up.sql b/hasura/migrations/1640183267176_move_time_zone_to_profile_table/up.sql new file mode 100644 index 0000000000..6483e1a47a --- /dev/null +++ b/hasura/migrations/1640183267176_move_time_zone_to_profile_table/up.sql @@ -0,0 +1,19 @@ + +ALTER TABLE public.profile + ADD COLUMN IF NOT EXISTS time_zone text NULL +; + +ALTER TABLE public.player + ADD COLUMN IF NOT EXISTS timezone integer NULL +; + +UPDATE profile + SET time_zone = player.timezone + FROM player + WHERE profile.player_id = player.id + AND profile.time_zone IS NULL +; + +ALTER TABLE public.player + DROP COLUMN IF EXISTS timezone CASCADE +; diff --git a/hasura/migrations/1640506201289_move_color_mask_to_profile/down.sql b/hasura/migrations/1640506201289_move_color_mask_to_profile/down.sql new file mode 100644 index 0000000000..a945155c41 --- /dev/null +++ b/hasura/migrations/1640506201289_move_color_mask_to_profile/down.sql @@ -0,0 +1,24 @@ +ALTER TABLE public.player + ADD COLUMN IF NOT EXISTS color_mask int4 NULL +; + +ALTER TABLE public.player + ADD CONSTRAINT player_color_mask_fkey + 
FOREIGN KEY (color_mask) REFERENCES public."ColorAspect"(mask) + ON DELETE restrict ON UPDATE restrict +; + +UPDATE player + SET color_mask = profile.color_mask + FROM profile + WHERE profile.player_id = player.id + AND player.color_mask IS NULL +; + +ALTER TABLE public.profile + DROP CONSTRAINT profile_color_mask_fkey +; + +ALTER TABLE public.profile + DROP COLUMN IF EXISTS color_mask +; diff --git a/hasura/migrations/1640506201289_move_color_mask_to_profile/up.sql b/hasura/migrations/1640506201289_move_color_mask_to_profile/up.sql new file mode 100644 index 0000000000..02f219c42d --- /dev/null +++ b/hasura/migrations/1640506201289_move_color_mask_to_profile/up.sql @@ -0,0 +1,33 @@ +ALTER TABLE public.profile + ADD COLUMN IF NOT EXISTS color_mask int4 NULL +; + +ALTER TABLE public.player + ADD COLUMN IF NOT EXISTS color_mask int4 NULL +; + +ALTER TABLE public.profile + DROP CONSTRAINT IF EXISTS profile_color_mask_fkey +; + +ALTER TABLE public.profile + ADD CONSTRAINT profile_color_mask_fkey + FOREIGN KEY (color_mask) + REFERENCES public."ColorAspect"(mask) + ON UPDATE restrict ON DELETE restrict +; + +UPDATE profile + SET color_mask = player.color_mask + FROM player + WHERE profile.player_id = player.id + AND profile.color_mask IS NULL +; + +ALTER TABLE public.player + DROP CONSTRAINT IF EXISTS player_color_mask_fkey +; + +ALTER TABLE public.player + DROP COLUMN IF EXISTS color_mask CASCADE +; diff --git a/hasura/migrations/1640517316534_case_insensitive_username_uniqueness/down.sql b/hasura/migrations/1640517316534_case_insensitive_username_uniqueness/down.sql new file mode 100644 index 0000000000..6c7d6823e2 --- /dev/null +++ b/hasura/migrations/1640517316534_case_insensitive_username_uniqueness/down.sql @@ -0,0 +1 @@ +DROP INDEX username_insensitive_unique_idx; diff --git a/hasura/migrations/1640517316534_case_insensitive_username_uniqueness/up.sql b/hasura/migrations/1640517316534_case_insensitive_username_uniqueness/up.sql new file mode 100644 index 0000000000..7207dd37d6 --- /dev/null +++ b/hasura/migrations/1640517316534_case_insensitive_username_uniqueness/up.sql @@ -0,0 +1,3 @@ +CREATE UNIQUE INDEX username_insensitive_unique_idx + ON profile(LOWER(username)) +; diff --git a/hasura/migrations/1640532625675_rename_table_public_player_type/down.sql b/hasura/migrations/1640532625675_rename_table_public_player_type/down.sql new file mode 100644 index 0000000000..47b5d395ff --- /dev/null +++ b/hasura/migrations/1640532625675_rename_table_public_player_type/down.sql @@ -0,0 +1,3 @@ +ALTER TABLE public."ExplorerType" + RENAME TO player_type +; diff --git a/hasura/migrations/1640532625675_rename_table_public_player_type/up.sql b/hasura/migrations/1640532625675_rename_table_public_player_type/up.sql new file mode 100644 index 0000000000..2c6a4ea483 --- /dev/null +++ b/hasura/migrations/1640532625675_rename_table_public_player_type/up.sql @@ -0,0 +1,3 @@ +ALTER TABLE public.player_type + RENAME TO "ExplorerType" +; diff --git a/hasura/migrations/1640532635456_alter_table_public_ExplorerType_alter_column_imageUrl/down.sql b/hasura/migrations/1640532635456_alter_table_public_ExplorerType_alter_column_imageUrl/down.sql new file mode 100644 index 0000000000..f01388cc24 --- /dev/null +++ b/hasura/migrations/1640532635456_alter_table_public_ExplorerType_alter_column_imageUrl/down.sql @@ -0,0 +1,3 @@ +ALTER TABLE public."ExplorerType" + RENAME COLUMN image_url TO "imageUrl" +; diff --git a/hasura/migrations/1640532635456_alter_table_public_ExplorerType_alter_column_imageUrl/up.sql 
b/hasura/migrations/1640532635456_alter_table_public_ExplorerType_alter_column_imageUrl/up.sql new file mode 100644 index 0000000000..3cfb92e0a6 --- /dev/null +++ b/hasura/migrations/1640532635456_alter_table_public_ExplorerType_alter_column_imageUrl/up.sql @@ -0,0 +1,3 @@ +ALTER TABLE public."ExplorerType" + RENAME COLUMN "imageUrl" TO image_url +; diff --git a/hasura/migrations/1640532806659_alter_table_public_profile_add_column_explorer_type_title/down.sql b/hasura/migrations/1640532806659_alter_table_public_profile_add_column_explorer_type_title/down.sql new file mode 100644 index 0000000000..6e2a42863a --- /dev/null +++ b/hasura/migrations/1640532806659_alter_table_public_profile_add_column_explorer_type_title/down.sql @@ -0,0 +1,3 @@ +ALTER TABLE public.profile + DROP COLUMN IF EXISTS explorer_type_title +; diff --git a/hasura/migrations/1640532806659_alter_table_public_profile_add_column_explorer_type_title/up.sql b/hasura/migrations/1640532806659_alter_table_public_profile_add_column_explorer_type_title/up.sql new file mode 100644 index 0000000000..64f4ca0ba2 --- /dev/null +++ b/hasura/migrations/1640532806659_alter_table_public_profile_add_column_explorer_type_title/up.sql @@ -0,0 +1,11 @@ +ALTER TABLE public.profile + ADD COLUMN IF NOT EXISTS explorer_type_title text NULL +; + +UPDATE profile + SET explorer_type_title = "ExplorerType".title + FROM player, "ExplorerType" + WHERE profile.player_id = player.id + AND player.player_type_id = "ExplorerType".id + AND profile.explorer_type_title IS NULL +; diff --git a/hasura/migrations/1640532883795_set_fk_public_profile_explorer_type_title/down.sql b/hasura/migrations/1640532883795_set_fk_public_profile_explorer_type_title/down.sql new file mode 100644 index 0000000000..a5fb0879c2 --- /dev/null +++ b/hasura/migrations/1640532883795_set_fk_public_profile_explorer_type_title/down.sql @@ -0,0 +1,3 @@ +ALTER TABLE public.profile + DROP CONSTRAINT profile_explorer_type_title_fkey +; diff --git a/hasura/migrations/1640532883795_set_fk_public_profile_explorer_type_title/up.sql b/hasura/migrations/1640532883795_set_fk_public_profile_explorer_type_title/up.sql new file mode 100644 index 0000000000..ec0a70ac94 --- /dev/null +++ b/hasura/migrations/1640532883795_set_fk_public_profile_explorer_type_title/up.sql @@ -0,0 +1,6 @@ +ALTER TABLE public.profile + ADD CONSTRAINT profile_explorer_type_title_fkey + FOREIGN KEY (explorer_type_title) + REFERENCES public."ExplorerType"(title) + ON UPDATE cascade ON DELETE restrict +; diff --git a/hasura/migrations/1640533703696_alter_table_public_profile_add_unique_id/down.sql b/hasura/migrations/1640533703696_alter_table_public_profile_add_unique_id/down.sql new file mode 100644 index 0000000000..eb8afae250 --- /dev/null +++ b/hasura/migrations/1640533703696_alter_table_public_profile_add_unique_id/down.sql @@ -0,0 +1,3 @@ +ALTER TABLE public.profile + DROP CONSTRAINT profile_id_key +; diff --git a/hasura/migrations/1640533703696_alter_table_public_profile_add_unique_id/up.sql b/hasura/migrations/1640533703696_alter_table_public_profile_add_unique_id/up.sql new file mode 100644 index 0000000000..95974dad40 --- /dev/null +++ b/hasura/migrations/1640533703696_alter_table_public_profile_add_unique_id/up.sql @@ -0,0 +1,3 @@ +ALTER TABLE public.profile + ADD CONSTRAINT profile_id_key UNIQUE (id) +; diff --git a/hasura/migrations/1641944106670_alter_table_public_player_drop_column_player_type_id/down.sql b/hasura/migrations/1641944106670_alter_table_public_player_drop_column_player_type_id/down.sql new file mode 
100644 index 0000000000..5bbc4b0c38 --- /dev/null +++ b/hasura/migrations/1641944106670_alter_table_public_player_drop_column_player_type_id/down.sql @@ -0,0 +1,3 @@ +ALTER TABLE "public"."player" ADD COLUMN "player_type_id" int4; +ALTER TABLE "public"."player" ALTER COLUMN "player_type_id" DROP NOT NULL; +ALTER TABLE "public"."player" ADD CONSTRAINT Player_playerTypeId_fkey FOREIGN KEY (player_type_id) REFERENCES "public"."ExplorerType" (id) ON DELETE restrict ON UPDATE cascade; diff --git a/hasura/migrations/1641944106670_alter_table_public_player_drop_column_player_type_id/up.sql b/hasura/migrations/1641944106670_alter_table_public_player_drop_column_player_type_id/up.sql new file mode 100644 index 0000000000..879aadbb57 --- /dev/null +++ b/hasura/migrations/1641944106670_alter_table_public_player_drop_column_player_type_id/up.sql @@ -0,0 +1 @@ +ALTER TABLE "public"."player" DROP COLUMN "player_type_id" CASCADE; diff --git a/hasura/migrations/1642440758431_remove_eth_address_usernames/up.sql b/hasura/migrations/1642440758431_remove_eth_address_usernames/up.sql new file mode 100644 index 0000000000..c8ef933852 --- /dev/null +++ b/hasura/migrations/1642440758431_remove_eth_address_usernames/up.sql @@ -0,0 +1,4 @@ +UPDATE public.profile + SET username = NULL + WHERE username ~* '^0x[0-9a-z]{40}$' +; diff --git a/hasura/seed-local-db.js b/hasura/seed-local-db.js deleted file mode 100644 index 97c3783394..0000000000 --- a/hasura/seed-local-db.js +++ /dev/null @@ -1,264 +0,0 @@ -/* eslint-disable */ -const fetch = require('node-fetch'); -const gql = require('fake-tag'); - -const PRODUCTION_GRAPHQL_URL = process.env.PRODUCTION_GRAPHQL_URL || 'https://api.metagame.wtf/v1/graphql'; -const LOCAL_GRAPHQL_URL = process.env.LOCAL_GRAPHQL_URL || 'http://localhost:8080/v1/graphql'; -const LOCAL_BACKEND_ACCOUNT_MIGRATION_URL = process.env.LOCAL_BACKEND_ACCOUNT_MIGRATION_URL || 'http://localhost:4000/actions/migrateSourceCredAccounts\?force\=true'; -const HASURA_GRAPHQL_ADMIN_SECRET = process.env.HASURA_GRAPHQL_ADMIN_SECRET || 'metagame_secret'; -const NUM_PLAYERS = process.env.SEED_NUM_PLAYERS || 300; - -const authHeaders = { - ['content-type']: 'application/json', - ['x-hasura-admin-secret']: HASURA_GRAPHQL_ADMIN_SECRET, -} - -async function fetchGraphQL(url, operationsDoc, operationName, variables = {}, isUpdate = false) { - const result = await fetch(url, { - method: 'POST', - body: JSON.stringify({ - query: operationsDoc, - variables: variables, - operationName: operationName, - }), - headers: isUpdate ? 
authHeaders : undefined, - }); - - return await result.json(); -} - -const topPlayersQuery = gql` - query GetTopPlayers { - player( - limit: ${NUM_PLAYERS} - order_by: { total_xp: desc } - where: { - availability_hours: { _gte: 0 } - timezone: { _in: null } - type: { id: { _in: null } } - skills: { Skill: { id: { _in: null } } } - } - ) { - id - username - ethereum_address - availability_hours - timezone - color_aspect { - mask - } - type { - id - } - skills { - Skill { - id - category - name - } - } - } - } -`; - -async function fetchTopPlayers() { - const { errors, data } = await fetchGraphQL( - PRODUCTION_GRAPHQL_URL, - topPlayersQuery, - 'GetTopPlayers', - ); - - if (errors) { - // handle those errors like a pro - errors.map((e) => { - throw e; - }); - } - - return data.player; -} - -const getPlayerIdsAndSkillsQuery = gql` - query GetPlayerIds($addresses: [String!]) { - player(where: { ethereum_address: { _in: $addresses } }) { - id - ethereum_address - } - skill { - id - category - name - } - } -`; - -async function fetchPlayerIdsAndSkills(addresses) { - const { errors, data } = await fetchGraphQL( - LOCAL_GRAPHQL_URL, - getPlayerIdsAndSkillsQuery, - 'GetPlayerIds', - { - addresses, - }, - ); - - if (errors) { - // handle those errors like a pro - errors.map((e) => { - throw e; - }); - } - - const ids = {}; - data.player.map(({ id, ethereum_address }) => { - ids[ethereum_address] = id; - }); - return { ids, skills: data.skill }; -} - -const deleteSkillsMutation = gql` - mutation DeleteSkills { - delete_player_skill(where: {}) { - affected_rows - } - } -`; - -async function deleteSkills() { - const { errors } = await fetchGraphQL( - LOCAL_GRAPHQL_URL, - deleteSkillsMutation, - 'DeleteSkills', - {}, - true - ); - - if (errors) { - // handle those errors like a pro - errors.map((e) => { - throw e; - }); - } -} - -const updatePlayerMutation = gql` - mutation UpdatePlayer( - $playerId: uuid! - $availability: Int - $timezone: String - $playerTypeId: Int - $colorMask: Int - $username: String - $skills: [player_skill_insert_input!]! 
- ) { - insert_player_skill(objects: $skills) { - affected_rows - } - update_player_by_pk( - pk_columns: { id: $playerId } - _set: { - player_type_id: $playerTypeId - timezone: $timezone - availability_hours: $availability - color_mask: $colorMask - username: $username - } - ) { - id - username - ethereum_address - availability_hours - timezone - color_aspect { - mask - } - type { - id - } - skills { - Skill { - id - } - } - } - } -`; - -async function updatePlayer(variables) { - const { errors, data } = await fetchGraphQL( - LOCAL_GRAPHQL_URL, - updatePlayerMutation, - 'UpdatePlayer', - variables, - true - ); - - if (errors) { - // handle those errors like a pro - errors.map((e) => { - throw e; - }); - } - - return data.update_player_by_pk; -} - -const skillsMap = {}; - -function getSkillId(skills, { Skill: { category, name } }) { - const skillMapId = category + name; - if (!skillsMap[skillMapId]) { - skills.map((skill) => { - skillsMap[skill.category + skill.name] = skill.id; - }); - } - return skillsMap[skillMapId]; -} - -async function forceMigrateAccounts() { - const result = await fetch(LOCAL_BACKEND_ACCOUNT_MIGRATION_URL, { - method: 'POST', - }); - return await result.json(); -} - -async function startSeeding() { - console.log(`Force migrating sourcecred users into local db`); - const result = await forceMigrateAccounts(); - console.log(result); - console.log(`Fetching players from prod db`); - const players = await fetchTopPlayers(); - const addresses = players.map((p) => p.ethereum_address); - console.log(`Fetching player ids for players from local db`); - const { ids, skills } = await fetchPlayerIdsAndSkills(addresses); - const mutations = players.map(player => { - const id = ids[player.ethereum_address]; - if (!id) return undefined; - return { - playerId: id, - availability: player.availability_hours, - timezone: player.timezone, - playerTypeId: player.type.id, - colorMask: player.color_aspect?.mask || null, - username: player.username, - skills: - player.skills.map((skill) => ({ - skill_id: getSkillId(skills, skill), - player_id: id, - })), - }; - }).filter(m => !!m); - console.log( - `Updating player information in local db for players in prod db`, - ); - await deleteSkills(); - const updated = await Promise.all(mutations.map((mutation) => updatePlayer(mutation))); - console.log(`Successfully seeded local db with ${updated.length} players`); -} - -startSeeding() - .then(() => process.exit(0)) - .catch((error) => { - console.error(error); - process.exit(1); - }); diff --git a/hasura/seed-local-db.mjs b/hasura/seed-local-db.mjs new file mode 100644 index 0000000000..1fe21ecbce --- /dev/null +++ b/hasura/seed-local-db.mjs @@ -0,0 +1,295 @@ +import Bottleneck from 'bottleneck'; +import fetch from 'node-fetch'; + +/* eslint-disable no-console */ + +const PRODUCTION_GRAPHQL_URL = ( + process.env.PRODUCTION_GRAPHQL_URL + || 'https://api.metagame.wtf/v1/graphql' +); +const LOCAL_GRAPHQL_URL = ( + process.env.LOCAL_GRAPHQL_URL || 'http://localhost:8080/v1/graphql' +); +const LOCAL_BACKEND_ACCOUNT_MIGRATION_URL = ( + process.env.LOCAL_BACKEND_ACCOUNT_MIGRATION_URL + || 'http://localhost:4000/actions/migrateSourceCredAccounts?force=true' +); +const HASURA_GRAPHQL_ADMIN_SECRET = ( + process.env.HASURA_GRAPHQL_ADMIN_SECRET || 'metagame_secret' +); +const NUM_PLAYERS = process.env.SEED_NUM_PLAYERS || 300; + +const authHeaders = { + 'content-type': 'application/json', + 'x-hasura-admin-secret': HASURA_GRAPHQL_ADMIN_SECRET, +} + +async function fetchGraphQL( + url, operationsDoc, 
operationName, variables = {}, isUpdate = false +) { + const result = await fetch(url, { + method: 'POST', + body: JSON.stringify({ + query: operationsDoc, + variables, + operationName, + }), + headers: isUpdate ? authHeaders : undefined, + }); + + const body = await result.text() + try { + return JSON.parse(body); + } catch (err) { + console.error(`JSON Error: ${err.message}`); + console.error(body); + return { errors: [err.message] }; + } +} + +const topPlayersQuery = /* GraphQL */` + query GetTopPlayers { + player( + limit: ${NUM_PLAYERS} + order_by: { total_xp: desc } + ) { + id + username + ethereum_address + availability_hours + timezone + color_mask + type { + title + } + skills { + Skill { + id + category + name + } + } + } + } +`; + +async function fetchTopPlayers() { + const { errors, data } = await fetchGraphQL( + PRODUCTION_GRAPHQL_URL, + topPlayersQuery, + 'GetTopPlayers', + ); + + if (errors) { + // handle those errors like a pro + errors.map((e) => { + throw e; + }); + } + + return data.player; +} + +const getPlayerIdsAndSkillsQuery = /* GraphQL */` + query GetPlayerIds($addresses: [String!]) { + player(where: { ethereumAddress: { _in: $addresses } }) { + id + ethereumAddress + } + skill { + id + category + name + } + } +`; + +async function fetchPlayerIdsAndSkills(addresses) { + const { errors, data } = await fetchGraphQL( + LOCAL_GRAPHQL_URL, + getPlayerIdsAndSkillsQuery, + 'GetPlayerIds', + { addresses }, + ); + + if (errors) { + throw errors[0] + } + + const ids = Object.fromEntries( + data.player.map(({ id, ethereumAddress }) => ( + [ethereumAddress, id] + )) + ); + return { ids, skills: data.skill }; +} + +const deleteSkillsMutation = /* GraphQL */` + mutation DeleteSkills { + delete_player_skill(where: {}) { + affected_rows + } + } +`; + +async function deleteSkills() { + const { errors } = await fetchGraphQL( + LOCAL_GRAPHQL_URL, + deleteSkillsMutation, + 'DeleteSkills', + {}, + true, + ); + + if (errors) throw errors[0]; +} + +const upsertPlayerMutation = /* GraphQL */` + mutation UpsertPlayer( + $playerId: uuid! + $availableHours: Int + $timeZone: String + $explorerTypeTitle: String + $colorMask: Int + $username: String + $skills: [player_skill_insert_input!]! 
+ ) { + insert_player_skill(objects: $skills) { + affected_rows + } + insert_profile( + objects: [{ + playerId: $playerId + explorerTypeTitle: $explorerTypeTitle + timeZone: $timeZone + availableHours: $availableHours + colorMask: $colorMask + username: $username + }] + on_conflict: { + constraint: profile_player_id_key + update_columns: [ + explorerTypeTitle + timeZone + availableHours + colorMask + username + ] + } + ) { + affected_rows + returning { + username + availableHours + timeZone + colorMask + explorerTypeTitle + player { + id + ethereumAddress + skills { + Skill { + id + } + } + } + } + } + } +`; + +async function upsertPlayer(variables) { + const { errors, data } = await fetchGraphQL( + LOCAL_GRAPHQL_URL, + upsertPlayerMutation, + 'UpsertPlayer', + variables, + true, + ); + + if (errors) { + console.error({ errors }); + throw errors[0]; + } + + return data.update_profile; +} + +const skillsMap = {}; + +function getSkillId(skills, { Skill: { category, name } }) { + const skillMapId = category + name; + if (!skillsMap[skillMapId]) { + skills.forEach((skill) => { + skillsMap[skill.category + skill.name] = skill.id; + }); + } + return skillsMap[skillMapId]; +} + +async function forceMigrateAccounts() { + const result = await fetch(LOCAL_BACKEND_ACCOUNT_MIGRATION_URL, { + method: 'POST', + }); + const json = await result.json(); + return json; +} + +async function startSeeding() { + console.debug(`Force migrating sourcecred users with: ${LOCAL_BACKEND_ACCOUNT_MIGRATION_URL}`); + const result = await forceMigrateAccounts(); + console.debug(result); + console.debug(`Fetching players from: ${PRODUCTION_GRAPHQL_URL}`); + const players = await fetchTopPlayers(); + const addresses = players.map(({ ethereum_address }) => ethereum_address); + console.debug(`Fetching player ids for players from ${LOCAL_GRAPHQL_URL} for ${addresses.length} addresses`); + const { ids, skills } = await fetchPlayerIdsAndSkills(addresses); + console.debug(`Fetched ${Object.keys(ids).length} player ids for players from addresses.`); + const mutations = ( + players.map((player, idx) => { + const playerId = ids[player.ethereum_address]; + if (!playerId) return undefined; + return { + ethereumAddress: player.ethereum_address, + count: idx + 1, + variables: { + playerId, + availableHours: player.availability_hours, + timeZone: player.timezone, + explorerTypeTitle: player.type?.title, + colorMask: player.color_mask ?? 
null, + username: player.username, + skills: ( + player.skills.map((skill) => ({ + skill_id: getSkillId(skills, skill), + player_id: playerId, + })) + ), + } + }; + }) + .filter(m => !!m) + ); + console.debug( + `Updating ${mutations.length} players information in ${LOCAL_GRAPHQL_URL}`, + ); + await deleteSkills(); + const limiter = new Bottleneck({ + maxConcurrent: 30, + minTime: 100, // 100 = 10 / second + }); + const updated = await Promise.all(mutations.map( + ({ ethereumAddress, count, variables }) => { + console.debug(`${count.toString().padStart(3, '0')}: Updating ${ethereumAddress} ("${variables.username}")`); + return limiter.schedule(() => upsertPlayer(variables)) + } + )); + console.debug(`Successfully seeded local db with ${updated.length} players`); +} + +startSeeding() +.then(() => process.exit(0)) +.catch((error) => { + console.error(error); + process.exit(1); +}); diff --git a/package.json b/package.json index 5083f3eb8f..ec23747956 100644 --- a/package.json +++ b/package.json @@ -12,6 +12,7 @@ "docker:build": "docker-compose up --build -d", "docker:stop": "docker-compose down", "docker:clean": "docker-compose down -v", + "build": "lerna run build", "web:dev": "lerna run dev --parallel --scope @metafam/web --include-dependencies", "web:build": "lerna run build --scope @metafam/web --include-dependencies --stream", "backend:dev": "lerna run dev --parallel --scope @metafam/backend", @@ -19,7 +20,7 @@ "hasura": "hasura --project ./hasura", "hasura:console": "yarn hasura console --no-browser", "hasura:migrate:init": "yarn hasura migrate create \"init\" --from-server", - "hasura:seed-local-db": "node hasura/seed-local-db.js", + "hasura:seed-local-db": "node hasura/seed-local-db.mjs", "test": "lerna run test --parallel --", "generate": "lerna run generate --parallel --", "test:full": "yarn lint && yarn typecheck && yarn test", @@ -50,16 +51,16 @@ "@graphql-codegen/typescript-react-apollo": "2.2.4", "@graphql-codegen/typescript-resolvers": "1.19.1", "@graphql-codegen/typescript-urql": "2.0.6", - "@types/jsdom": "^16.2.13", + "@types/jsdom": "16.2.13", "@types/node": "15.6.1", "@types/react": "17.0.6", "@types/react-dom": "17.0.5", - "@typescript-eslint/eslint-plugin": "4.32.0", + "@typescript-eslint/eslint-plugin": "5.5.0", "@typescript-eslint/parser": "5.4.0", "env-cmd": "10.1.0", "eslint": "7.32.0", "eslint-config-airbnb": "19.0.2", - "eslint-config-airbnb-typescript": "13.0.1", + "eslint-config-airbnb-typescript": "16.1.0", "eslint-config-prettier": "8.3.0", "eslint-import-resolver-typescript": "2.4.0", "eslint-plugin-import": "2.23.4", @@ -76,15 +77,19 @@ "lerna": "4.0.0", "lint-staged": "11.0.0", "prettier": "2.2.1", - "react": "^17.0.2", - "react-dom": "^17.0.2", - "tsdx": "0.14.1", + "react": "17.0.2", + "react-dom": "17.0.2", "standard-version": "9.3.2", + "tsdx": "0.14.1", "typescript": "4.5.4", "wait-on": "5.3.0" }, "resolutions": { - "bcrypto": "5.2.0" + "bcrypto": "5.2.0", + "typescript": "4.5.4" }, - "dependencies": {} + "dependencies": { + "bottleneck": "^2.19.5", + "node-fetch": "3.1.0" + } } diff --git a/packages/@types/3box/index.d.ts b/packages/@types/3box/index.d.ts deleted file mode 100644 index 9342bd5e67..0000000000 --- a/packages/@types/3box/index.d.ts +++ /dev/null @@ -1,71 +0,0 @@ -declare module '3box' { - export interface BoxProfile { - proof_did?: string; - collectiblesFavorites?: CollectiblesFavorite[]; - name?: string; - proof_twitter?: string; - proof_github?: string; - coverPhoto?: Image[]; - image?: Image[]; - emoji?: string; - job?: string; - 
employer?: string; - description?: string; - year?: string; - degree?: string; - school?: string; - website?: string; - location?: string; - memberSince?: string; - } - - interface CollectiblesFavorite { - address: string; - token_id: string; - } - - interface Image { - '@type': string; - contentUrl: { - '/': string; - }; - } - - export async function getProfile(ethAddress: string): Promise; - - interface VerifiedAccounts { - did: string; - github?: { - proof: string; - username: string; - }; - twitter?: { - proof: string; - username: string; - }; - } - - export async function getVerifiedAccounts( - boxProfile: BoxProfile, - ): Promise; - - interface BoxSpace { - public: { - get(key: string): Promise; - set(key: string, data: string): Promise; - }; - } - - interface Box { - syncDone: Promise; - openSpace: (url: string) => Promise; - twitter?: { - proof: string; - username: string; - }; - } - export async function openBox( - ethAddress: string, - web3: unknown, - ): Promise; -} diff --git a/packages/backend/jest.config.js b/packages/backend/jest.config.js new file mode 100644 index 0000000000..25c9bac510 --- /dev/null +++ b/packages/backend/jest.config.js @@ -0,0 +1,3 @@ +module.exports = { + testEnvironment: 'node', +}; diff --git a/packages/backend/package.json b/packages/backend/package.json index f2bf16c0cd..1bc5dfd813 100644 --- a/packages/backend/package.json +++ b/packages/backend/package.json @@ -15,25 +15,32 @@ "lintfix": "eslint --fix", "test": "tsdx test --passWithNoTests" }, - "author": "", + "author": "MetaFam", "license": "ISC", "dependencies": { - "3box": "1.22.2", - "@ceramicnetwork/http-client": "1.0.7", - "@ceramicstudio/idx": "0.12.2", - "@ceramicstudio/idx-constants": "1.0.0", + "@ceramicnetwork/common": "1.8.0", + "@ceramicnetwork/http-client": "1.5.0", + "@ceramicnetwork/stream-caip10-link": "1.2.2", + "@datamodels/identity-accounts-web": "0.1.2", + "@datamodels/identity-profile-basic": "0.1.2", + "@glazed/datamodel": "0.2.0", + "@glazed/devtools": "0.1.3", + "@glazed/did-datastore": "0.2.1", + "@glazed/tile-loader": "0.1.2", + "@graphql-tools/schema": "8.3.1", "@metafam/discord-bot": "0.1.0", - "@metafam/utils": "1.0.0", + "@metafam/utils": "1.0.1", + "@self.id/3box-legacy": "0.1.0", "@types/bluebird": "3.5.34", "@types/body-parser": "1.19.0", - "@types/busboy": "^0.2.4", + "@types/cors": "2.8.12", "@types/express": "4.17.11", "@types/node-fetch": "2.5.10", "@types/uuid": "8.3.0", "bluebird": "3.7.2", "body-parser": "1.19.0", "bottleneck": "2.19.5", - "cors": "^2.8.5", + "cors": "2.8.5", "discord.js": "12.5.3", "dotenv": "^10.0.0", "ethers": "5.4.3", @@ -44,16 +51,12 @@ "graphql-tag": "2.12.5", "graphql-tools": "7.0.4", "imgix-core-js": "2.3.2", - "multer": "^1.4.3", "node-fetch": "2.6.1", "sourcecred": "0.9.0", "uuid": "8.3.2", - "web3.storage": "^3.3.1" + "web3.storage": "3.3.1" }, "devDependencies": { - "@types/busboy": "^0.2.4", - "@types/cors": "^2.8.12", - "@types/multer": "^1.4.7", "concurrently": "6.1.0", "eslint-plugin-jest": "24.3.6", "nock": "13.2.1", diff --git a/packages/backend/src/config.ts b/packages/backend/src/config.ts index 754a48f378..484738aef5 100644 --- a/packages/backend/src/config.ts +++ b/packages/backend/src/config.ts @@ -12,14 +12,11 @@ interface IConfig { seedGraphqlURL: string; githubApiToken: string; adminKey: string; - ipfsEndpoint: string; - imgixToken: string; infuraId: string; pSEEDAddress: string; brightIdAppURL: string; sourceCredLedgerBranch: string; ceramicURL: string; - web3StorageToken: string; } function parseEnv( @@ -67,8 
+64,6 @@ export const CONFIG: IConfig = { 'metagame_secret', ), githubApiToken: parseEnv(process.env.GITHUB_API_TOKEN, ''), - ipfsEndpoint: parseEnv(process.env.IPFS_ENDPOINT, 'https://ipfs.infura.io'), - imgixToken: parseEnv(process.env.IMGIX_TOKEN, ''), pSEEDAddress: parseEnv( process.env.PSEED_ADDRESS, '0x34a01c0a95b0592cc818cd846c3cf285d6c85a31', @@ -87,7 +82,8 @@ export const CONFIG: IConfig = { ), ceramicURL: parseEnv( process.env.CERAMIC_URL, - 'https://d12-a-ceramic.3boxlabs.com', // 'http://localhost:7007', + 'https://ceramic.metagame.wtf' || + 'https://d12-a-ceramic.3boxlabs.com' || + 'http://localhost:7007', ), - web3StorageToken: parseEnv(process.env.WEB3_STORAGE_TOKEN, ''), }; diff --git a/packages/backend/src/handlers/actions/guild/saveGuildHandler.ts b/packages/backend/src/handlers/actions/guild/saveGuildHandler.ts index aadbb4d1e6..bce1183b25 100644 --- a/packages/backend/src/handlers/actions/guild/saveGuildHandler.ts +++ b/packages/backend/src/handlers/actions/guild/saveGuildHandler.ts @@ -5,7 +5,6 @@ import { Guild_Set_Input, GuildInfo, GuildType_Enum, - SaveGuildResponse, } from '../../../lib/autogen/hasura-sdk'; import { client } from '../../../lib/hasuraClient'; @@ -14,72 +13,66 @@ export const saveGuildHandler = async ( res: Response, ): Promise => { const { input, session_variables: sessionVariables } = req.body; - const playerId = sessionVariables['x-hasura-user-id']; try { const { guildInformation } = input; await saveGuild(playerId, guildInformation as GuildInfo); - + res.json({ success: true }); + } catch (error) { + console.error(error); res.json({ - success: true, - }); - } catch (e) { - const error = (e as Error).message; - const errorResponse: SaveGuildResponse = { success: false, - error, - }; - console.error(error); - res.json(errorResponse); + error: (error as Error).message, + }); } }; const saveGuild = async (playerId: string, guildInfo: GuildInfo) => { - const existingGuildMetadataResponse = await client.GetGuildMetadataById({ - id: guildInfo.uuid, - }); - if ( - existingGuildMetadataResponse?.guild_metadata == null || - existingGuildMetadataResponse.guild_metadata.length !== 1 - ) { - throw new Error('No pending guild with that ID exists'); + const { guild_metadata: metadata } = + (await client.GetGuildMetadataById({ + id: guildInfo.uuid, + })) ?? 
{}; + if (metadata == null || metadata.length === 0) { + throw new Error('No pending guild with that ID exists.'); } - const existingGuildMetadata = existingGuildMetadataResponse.guild_metadata[0]; + const [ + { creator_id: creatorId, discord_metadata: discordMetadata }, + ] = metadata; - if (existingGuildMetadata.creator_id !== playerId) { + if (creatorId !== playerId) { throw new Error( - "Only the guild's discord server owner can edit this guild", + 'Only the guild’s discord server owner can edit this guild.', ); } - const updateGuildData: Guild_Set_Input = { + const updatedData: Guild_Set_Input = { guildname: guildInfo.guildname, name: guildInfo.name, type: (guildInfo.type as unknown) as GuildType_Enum, description: guildInfo.description, - discord_invite_url: guildInfo.discordInviteUrl, - join_button_url: guildInfo.joinUrl, - logo: guildInfo.logoUrl, - website_url: guildInfo.websiteUrl, - twitter_url: guildInfo.twitterUrl, - github_url: guildInfo.githubUrl, + discord_invite_url: guildInfo.discordInviteURL, + join_button_url: guildInfo.joinURL, + logo: guildInfo.logoURL, + website_url: guildInfo.websiteURL, + twitter_url: guildInfo.twitterURL, + github_url: guildInfo.githubURL, moloch_address: guildInfo.daoAddress, }; await client.UpdateGuild({ guildId: guildInfo.uuid, - object: updateGuildData, + object: updatedData, }); - const updateGuildMetadata: GuildDiscordMetadata = { - ...existingGuildMetadata.discord_metadata, + const updatedMetadata: GuildDiscordMetadata = { + ...discordMetadata, membershipRoleIds: guildInfo.discordMembershipRoles, administratorRoleIds: guildInfo.discordAdminRoles, }; await client.UpdateGuildDiscordMetadata({ guildId: guildInfo.uuid, - discordMetadata: updateGuildMetadata, + discordMetadata: updatedMetadata, }); }; diff --git a/packages/backend/src/handlers/actions/idxCache/updateExpiredProfiles/handler.ts b/packages/backend/src/handlers/actions/idxCache/updateExpiredProfiles/handler.ts index dbdcd70770..40828996d3 100644 --- a/packages/backend/src/handlers/actions/idxCache/updateExpiredProfiles/handler.ts +++ b/packages/backend/src/handlers/actions/idxCache/updateExpiredProfiles/handler.ts @@ -1,33 +1,29 @@ import { Request, Response } from 'express'; +import { queueRecache } from '../../../../lib/cacheHelper'; import { client } from '../../../../lib/hasuraClient'; -import updateCachedProfile from '../updateSingle'; const INVALIDATE_AFTER_DAYS = 4; // number of days after which to recache export default async (req: Request, res: Response): Promise => { + const { limiter } = req.app.locals; const expiration = new Date(); - expiration.setDate(expiration.getDate() - INVALIDATE_AFTER_DAYS); - const { profile_cache: players } = await client.GetCacheEntries({ + const invalidateAfterDays = + req.query.invalidate_after_days != null + ? 
parseInt(req.query.invalidate_after_days as string, 10) + : INVALIDATE_AFTER_DAYS; + expiration.setDate(expiration.getDate() - invalidateAfterDays); + const { profile: players } = await client.GetCacheEntries({ updatedBefore: expiration, }); - const idsToProcess: string[] = []; - await Promise.all( - players.map(async ({ playerId }) => { - if (!req.app.locals.queuedRecacheFor[playerId]) { - req.app.locals.queuedRecacheFor[playerId] = true; - idsToProcess.push(playerId); - req.app.locals.limiter.schedule(() => - (async () => { - try { - await updateCachedProfile(playerId); - } finally { - req.app.locals.queuedRecacheFor[playerId] = false; - } - })(), - ); - } - }), - ); - res.json({ ids: idsToProcess }); + const ids = ( + await Promise.all( + players.map(async ({ playerId }) => { + const queued = await queueRecache({ playerId, limiter }); + return queued ? playerId : null; + }), + ) + ).filter((id) => !!id); + + res.json({ ids }); }; diff --git a/packages/backend/src/handlers/actions/idxCache/updateSingle.ts b/packages/backend/src/handlers/actions/idxCache/updateSingle.ts index 9182ef9fb2..93c6d77c78 100644 --- a/packages/backend/src/handlers/actions/idxCache/updateSingle.ts +++ b/packages/backend/src/handlers/actions/idxCache/updateSingle.ts @@ -1,139 +1,248 @@ -import type { CeramicApi } from '@ceramicnetwork/common'; import Ceramic from '@ceramicnetwork/http-client'; -import { getLegacy3BoxProfileAsBasicProfile, IDX } from '@ceramicstudio/idx'; -// https://github.com/ceramicnetwork/CIP/blob/main/CIPs/CIP-19/CIP-19.md#record-schema -import type { BasicProfile } from '@ceramicstudio/idx-constants'; -import Box from '3box'; +import { Caip10Link } from '@ceramicnetwork/stream-caip10-link'; +import { + Account, + AlsoKnownAs, + model as alsoKnownAsModel, +} from '@datamodels/identity-accounts-web'; +import { + BasicProfile, + ImageSources, + model as basicProfileModel, +} from '@datamodels/identity-profile-basic'; +import { ModelManager } from '@glazed/devtools'; +import { DIDDataStore } from '@glazed/did-datastore'; +import { TileLoader } from '@glazed/tile-loader'; +import { + BasicProfileImages, + BasicProfileStrings, + ExtendedProfile, + ExtendedProfileImages, + extendedProfileModel, + ExtendedProfileObjects, + ExtendedProfileStrings, + HasuraProfileProps, + Values, +} from '@metafam/utils'; +import { getLegacy3BoxProfileAsBasicProfile } from '@self.id/3box-legacy'; import { CONFIG } from '../../../config'; import { AccountType_Enum, - UpdateBoxProfileResponse, + Maybe, + Profile_Update_Column, + UpdateIdxProfileResponse, } from '../../../lib/autogen/hasura-sdk'; +import { maskFor } from '../../../lib/colorHelpers'; import { client } from '../../../lib/hasuraClient'; -import { optimizeImage, OptimizeImageParams } from '../../../lib/imageHelpers'; - -function getImage(image: string | null | undefined, opts: OptimizeImageParams) { - const [, imageHash] = image?.match(/^ipfs:\/\/(.+)$/) ?? []; - - if (imageHash) { - return optimizeImage(`${CONFIG.ipfsEndpoint}/ipfs/${imageHash}`, opts); - } - return image; -} - -const ceramic = (new Ceramic(CONFIG.ceramicURL) as unknown) as CeramicApi; -const idx = new IDX({ ceramic }); -export default async (playerId: string): Promise => { - const updatedProfiles: string[] = []; +export default async (playerId: string): Promise => { + const accountLinks: string[] = []; + const fields: string[] = []; const { player_by_pk: player } = await client.GetPlayer({ playerId }); - const ethAddress = player?.ethereum_address; + const { ethereumAddress } = player ?? 
{}; + let did = null; - if (!ethAddress) { - throw new Error('unknown-player'); + if (!ethereumAddress) { + throw new Error(`Unknown Player: "${playerId}"`); + } else { + console.debug(`Updating Profile Cache For ${ethereumAddress}`); } - let idxProfile; try { - idxProfile = await idx.get( - 'basicProfile', - `${ethAddress.toLowerCase()}@eip155:1`, - ); - } catch (err) { - const msg = (err as Error).message; - - if (!msg.includes('No DID')) { - throw err; + const cache = new Map(); + const ceramic = new Ceramic(CONFIG.ceramicURL); + const loader = new TileLoader({ ceramic, cache }); + const manager = new ModelManager(ceramic); + manager.addJSONModel(basicProfileModel); + manager.addJSONModel(alsoKnownAsModel); + manager.addJSONModel(extendedProfileModel); + + const store = new DIDDataStore({ + ceramic, + loader, + model: await manager.toPublished(), + }); + ({ did } = await Caip10Link.fromAccount( + ceramic, + // Defaulting to mainnet. This may cause data irregularities + // if their wallet is connected to a different DID on a + // different chain. + `${ethereumAddress.toLowerCase()}@eip155:1`, + )); + const values: HasuraProfileProps = {}; + let basicProfile: Maybe = null; + let extendedProfile: Maybe = null; + + if (!did) { + console.debug(`No CAIP-10 Link For ${ethereumAddress}`); + } else { + basicProfile = await store.get('basicProfile', did); } - } - if (!idxProfile) { - idxProfile = await getLegacy3BoxProfileAsBasicProfile(ethAddress); - } + // This isn't called if they haven't created a mainnet DID + // This should be checked even without a DID + if (!basicProfile) { + basicProfile = await getLegacy3BoxProfileAsBasicProfile(ethereumAddress); + } - if (!idxProfile) { - console.info(`No Profile For: ${ethAddress}`); - idxProfile = {}; // create an empty placeholder row - } + if (!basicProfile) { + console.debug(`No Basic Profile For: ${ethereumAddress} (${did})`); + } else { + Object.entries(BasicProfileStrings).forEach(([hasuraId, ceramicId]) => { + const fromKey = ceramicId as Values; + const toKey = hasuraId as keyof typeof BasicProfileStrings; + if (basicProfile?.[fromKey] != null) { + values[toKey] = (basicProfile[fromKey] as string) ?? null; + } + }); + Object.entries(BasicProfileImages).forEach(([hasuraId, ceramicId]) => { + const fromKey = ceramicId as Values; + const toKey = hasuraId as keyof typeof BasicProfileImages; + if (basicProfile?.[fromKey] != null) { + values[toKey] = (basicProfile[fromKey] as ImageSources).original.src; + } + }); + } - const { - name, - description, - emoji, - gender, - url, - homeLocation: location, - residenceCountry: country, - image, - background, - } = idxProfile; - const values = { - playerId, - name, - description, - emoji, - imageURL: getImage(image?.original?.src, { - ar: '1:1', - height: 200, - }), - backgroundImageURL: getImage(background?.original?.src, { - height: 300, - }), - gender, - location, - country, - website: url, - }; + if (did) { + extendedProfile = await store.get('extendedProfile', did); + + if (!extendedProfile) { + console.debug(`No Extended Profile For: ${ethereumAddress} (${did})`); + } else { + Object.entries(ExtendedProfileStrings).forEach( + ([hasuraId, ceramicId]) => { + const fromKey = ceramicId as Values; + const toKey = hasuraId as keyof typeof ExtendedProfileStrings; + if (extendedProfile?.[fromKey] != null) { + values[toKey] = (extendedProfile[fromKey] as string) ?? 
null; + } + }, + ); + Object.entries(ExtendedProfileImages).forEach( + ([hasuraId, ceramicId]) => { + const fromKey = ceramicId as Values; + const toKey = hasuraId as keyof typeof ExtendedProfileImages; + if (extendedProfile?.[fromKey] != null) { + values[toKey] = (extendedProfile[ + fromKey + ] as ImageSources).original.src; + } + }, + ); + Object.entries(ExtendedProfileObjects).forEach( + ([hasuraId, ceramicId]) => { + const fromKey = ceramicId as Values; + const toKey = hasuraId as keyof typeof ExtendedProfileObjects; + if (extendedProfile?.[fromKey] != null) { + switch (fromKey) { + case 'availableHours': { + values.availableHours = extendedProfile.availableHours; + break; + } + case 'colorDisposition': { + values.colorMask = + maskFor(extendedProfile.colorDisposition) ?? undefined; + break; + } + default: { + console.info({ fromKey, toKey }); + } + } + } + }, + ); + } + } - await client.UpsertProfileCache({ objects: [values] }); - - // There isn't yet an interface for linking accounts on self.id - const boxProfile = await Box.getProfile(ethAddress); - const verifiedAccounts = await Box.getVerifiedAccounts(boxProfile); - - if (verifiedAccounts.github) { - const { insert_player_account: insert } = await client.UpsertAccount({ - objects: [ - { - player_id: playerId, - type: AccountType_Enum.Github, - identifier: verifiedAccounts.github.username, - }, - ], - }); - if (insert?.affected_rows) { - updatedProfiles.push('github'); - } else if (insert?.affected_rows === undefined) { - // eslint-disable-next-line no-console - console.warn( - `Unable to insert Github user ${verifiedAccounts.github.username} for playerId ${playerId}`, - ); + if (!basicProfile && !extendedProfile) { + console.info(`No Profile Information For ${ethereumAddress}.`); + } else { + try { + fields.push(...Object.keys(values)); + values.playerId = playerId; + + await client.UpsertProfile({ + objects: [values], + updateColumns: fields as Profile_Update_Column[], + }); + } catch (err) { + if ( + !(err as Error).message.includes( + 'violates unique constraint "profile_username_key"', + ) + ) { + throw err; + } else { + // this is brittle and likely subject to exploit + values.username = `${values.username}-${( + did ?? ethereumAddress + ).slice(-8)}`; + + await client.UpsertProfile({ + objects: [values], + updateColumns: fields as Profile_Update_Column[], + }); + } + } } - } - if (verifiedAccounts.twitter) { - const { insert_player_account: insert } = await client.UpsertAccount({ - objects: [ - { - player_id: playerId, - type: AccountType_Enum.Twitter, - identifier: verifiedAccounts.twitter.username, - }, - ], - }); - if (insert?.affected_rows) { - updatedProfiles.push('twitter'); - } else if (insert?.affected_rows === undefined) { - // eslint-disable-next-line no-console - console.warn( - `Unable to insert Twitter user ${verifiedAccounts.twitter.username} for playerId ${playerId}`, + if (did) { + const alsoKnownAs = ((await store.get('alsoKnownAs', did)) ?? + {}) as AlsoKnownAs; + const { accounts = [] } = alsoKnownAs; + + await Promise.all( + accounts?.map(async ({ host, id: username }: Account) => { + const service = host + ?.replace(/\.com$/, '') + .toUpperCase() as AccountType_Enum; + if (!service) { + console.error(`No hostname for AlsoKnownAs: "${host}"`); + } else { + // If the account has been registered previously, this will + // destructively assign it to the current user removing any + // other users. + // + // ToDo: Examine the JWT to validate that it came from a + // trusted source. 
Specifically, either the IdentityLink + // service backing //self.id or one established by MetaGame + const { + insert_player_account: insert, + } = await client.UpsertAccount({ + objects: [ + { + playerId, + type: service, + identifier: username, + }, + ], + }); + if (insert?.affected_rows === undefined) { + // eslint-disable-next-line no-console + console.warn( + `Unable to insert ${service} user ${username} for playerId ${playerId}.`, + ); + } else if (insert.affected_rows > 0) { + accountLinks.push(service); + } + } + }), ); } + } catch (err) { + if (!(err as Error).message.includes('No DID')) { + throw err; + } } return { success: true, - updatedProfiles, + ceramic: CONFIG.ceramicURL, + did, + ethereumAddress, + accountLinks, + fields, }; }; diff --git a/packages/backend/src/handlers/actions/idxCache/updateSingleProfile/handler.ts b/packages/backend/src/handlers/actions/idxCache/updateSingleProfile/handler.ts index 199292a35a..616c2db427 100644 --- a/packages/backend/src/handlers/actions/idxCache/updateSingleProfile/handler.ts +++ b/packages/backend/src/handlers/actions/idxCache/updateSingleProfile/handler.ts @@ -1,33 +1,25 @@ import { Request, Response } from 'express'; -import updateCachedProfile from '../updateSingle'; +import { queueRecache } from '../../../../lib/cacheHelper'; export default async (req: Request, res: Response): Promise => { - const session = req.body.session_variables; - const role = session['x-hasura-role']; - const playerId = req.body.input?.playerId; + const role = req.body.session_variables['x-hasura-role']; + const { playerId } = req.body.input ?? {}; + const { limiter } = req.app.locals; - if (!['admin', 'player'].includes(role)) { - throw new Error(`Expected Role: admin or player. Got "${role}".`); + if (!['admin', 'player', 'public'].includes(role)) { + throw new Error(`Expected Role: admin, player, or public. 
Got "${role}".`); } if (!playerId) { - throw new Error('No playerId specified to update.'); + throw new Error('Player Id not specified in updateSingleProfile handler.'); } - if (!req.app.locals.queuedRecacheFor[playerId]) { - req.app.locals.queuedRecacheFor[playerId] = true; - req.app.locals.limiter.schedule(() => - (async () => { - try { - await updateCachedProfile(playerId); - } finally { - req.app.locals.queuedRecacheFor[playerId] = false; - } - })(), - ); - res.json({ success: true }); - } else { - throw new Error('Already queued to be refreshed.'); + if (!limiter) { + throw new Error('Couldn’t find Bottleneck limiter.'); } + + const queued = await queueRecache({ playerId, limiter }); + + res.json({ success: true, queued }); }; diff --git a/packages/backend/src/handlers/actions/migrateSourceCredAccounts/handler.ts b/packages/backend/src/handlers/actions/migrateSourceCredAccounts/handler.ts index ca2033483d..db6dab1153 100644 --- a/packages/backend/src/handlers/actions/migrateSourceCredAccounts/handler.ts +++ b/packages/backend/src/handlers/actions/migrateSourceCredAccounts/handler.ts @@ -1,11 +1,11 @@ import { Constants, + fetch, getLatestEthAddress, isNotNullOrUndefined, } from '@metafam/utils'; import bluebird from 'bluebird'; import { Request, Response } from 'express'; -import fetch from 'node-fetch'; import { SCAccountsData, SCAlias, sourcecred as sc } from 'sourcecred'; import { @@ -30,7 +30,7 @@ const parseAlias = (alias: SCAlias) => { const addressParts = sc.core.graph.NodeAddress.toParts(alias.address); const type = addressParts[1]?.toUpperCase() as AccountType_Enum; - if (VALID_ACCOUNT_TYPES.indexOf(type) < 0) { + if (!VALID_ACCOUNT_TYPES.includes(type)) { return null; } @@ -41,9 +41,11 @@ const parseAlias = (alias: SCAlias) => { identifier, }; } catch (e) { - const error = (e as Error).message; - - console.warn('Unable to parse alias: ', { error, alias }); + // eslint-disable-next-line no-console + console.error('Unable to parse alias:', { + error: (e as Error).message, + alias, + }); return null; } }; @@ -52,17 +54,16 @@ export const migrateSourceCredAccounts = async ( req: Request, res: Response, ): Promise => { - const ledgerRes = await ledgerManager.reloadLedger(); - if (ledgerRes.error) { - throw new Error(`Unable to load ledger: ${ledgerRes.error}`); + const { error: loadError } = await ledgerManager.reloadLedger(); + if (loadError) { + throw new Error(`Unable to load ledger: ${loadError}`); } const force = req.query.force != null; - console.log(`Updating players from sourcecred. Force-insert? ${force}`); + console.debug(`Updating players from sourcecred. Force-insert? 
${force}`); - const accountsData: SCAccountsData = await ( - await fetch(Constants.SC_ACCOUNTS_FILE) - ).json(); + const accountsResult = await fetch(Constants.SC_ACCOUNTS_FILE); + const accountsData = (await accountsResult.json()) as SCAccountsData; const accountOnConflict = { constraint: Player_Account_Constraint.AccountIdentifierTypeKey, update_columns: [], @@ -87,13 +88,13 @@ export const migrateSourceCredAccounts = async ( const rank = computeRank(index); const userWeeklyCred = a.cred; - const seasonXp = userWeeklyCred + const seasonXP = userWeeklyCred .slice(-numWeeksInSeason) .reduce((t, c) => t + c, 0); return { - ethereum_address: ethAddress.toLowerCase(), - totalXp: a.totalCred, - seasonXp, + ethereumAddress: ethAddress.toLowerCase(), + totalXP: a.totalCred, + seasonXP, rank, discordId, Accounts: { @@ -112,57 +113,56 @@ export const migrateSourceCredAccounts = async ( accountList, async (player) => { const vars = { - ethAddress: player.ethereum_address, + ethAddress: player.ethereumAddress, rank: player.rank, - totalXp: player.totalXp, - seasonXp: player.seasonXp, + totalXP: player.totalXP, + seasonXP: player.seasonXP, discordId: player.discordId, }; try { - const updateResult = await client.UpdatePlayer(vars); + const { update_player: update } = await client.UpdatePlayer(vars); - let playerId: string; - let affected = updateResult.update_player?.affected_rows; + let playerId: string = update?.returning[0]?.id; + let { affected_rows: affected } = update ?? {}; - if (affected === 0) { + if ((affected ?? 0) > 1) { + throw new Error( + `Multiple players (${affected}) updated incorrectly: ${player.ethereumAddress}`, + ); + } else if (affected === 0) { if (!force) { return player; } - // 'force' indicates we should insert new players if they don't already exist. - const upsertResult = await client.InsertPlayers({ + // 'force' indicates we should insert new players + // if they don't already exist. 
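// [Editor's note — illustrative only, not part of the diff] A minimal sketch of the
// update-then-insert flow this hunk implements: try UpdatePlayer first and, only when
// zero rows matched and the ?force flag was passed, fall back to InsertPlayers. The
// `sdk` parameter and its method signatures below are assumptions for illustration,
// not the generated client from lib/autogen/hasura-sdk.
async function upsertByEthAddress(
  sdk: {
    UpdatePlayer(vars: { ethAddress: string; rank: string }): Promise<{ affectedRows: number; id?: string }>;
    InsertPlayers(vars: { ethereumAddress: string; rank: string }): Promise<{ id: string }>;
  },
  ethereumAddress: string,
  rank: string,
  force: boolean,
): Promise<string | null> {
  const updated = await sdk.UpdatePlayer({ ethAddress: ethereumAddress, rank });
  if (updated.affectedRows > 0) return updated.id ?? null;
  if (!force) return null; // player unknown locally and inserts not forced
  const inserted = await sdk.InsertPlayers({ ethereumAddress, rank });
  return inserted.id;
}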
+ const { insert_player: insert } = await client.InsertPlayers({ objects: [ { - username: player.ethereum_address, - ethereum_address: player.ethereum_address, + ethereumAddress: player.ethereumAddress, rank: player.rank, - total_xp: player.totalXp, - season_xp: player.seasonXp, + totalXP: player.totalXP, + seasonXP: player.seasonXP, }, ], }); - affected = upsertResult.insert_player?.affected_rows; - playerId = upsertResult.insert_player?.returning[0]?.id; - } else { - playerId = updateResult.update_player?.returning[0]?.id; - } - if (affected && affected > 1) { - throw new Error('Multiple players updated incorrectly'); + affected = insert?.affected_rows; + playerId = insert?.returning[0]?.id; } if (playerId) { try { await client.UpsertAccount({ objects: player.Accounts.data.map((account) => ({ - player_id: playerId, + playerId, type: account.type, identifier: account.identifier, })), on_conflict: accountOnConflict, }); } catch (accErr) { - console.log( + console.error( 'Error updating accounts for Player', playerId, accErr, diff --git a/packages/backend/src/handlers/actions/quests/createCompletion/createCompletion.ts b/packages/backend/src/handlers/actions/quests/createCompletion/createCompletion.ts index 94ab99362b..091194facb 100644 --- a/packages/backend/src/handlers/actions/quests/createCompletion/createCompletion.ts +++ b/packages/backend/src/handlers/actions/quests/createCompletion/createCompletion.ts @@ -11,19 +11,19 @@ export async function createCompletion( playerId: string, questCompletion: CreateQuestCompletionInput, ): Promise { - if (!questCompletion.submission_link && !questCompletion.submission_text) { + if (!questCompletion.submissionLink && !questCompletion.submissionText) { throw new Error('Must provide at least a submission link or text'); } const { quest_by_pk: quest } = await client.GetQuestById({ - quest_id: questCompletion.quest_id, + questId: questCompletion.questId, }); if (!quest) { - throw new Error('Quest not found'); + throw new Error('Quest not found.'); } if (quest.status !== QuestStatus_Enum.Open) { - throw new Error('Quest must be open'); + throw new Error('Quest must be open.'); } // Personal or unique, check if not already done by player @@ -34,8 +34,8 @@ export async function createCompletion( const { quest_completion: existingQuestCompletions, } = await client.GetQuestCompletions({ - player_id: playerId, - quest_id: questCompletion.quest_id, + playerId, + questId: questCompletion.questId, }); if (existingQuestCompletions.length > 0) { throw new Error( @@ -49,12 +49,12 @@ export async function createCompletion( const { quest_completion: existingQuestCompletions, } = await client.GetLastQuestCompletionForPlayer({ - player_id: playerId, - quest_id: quest.id, + playerId, + questId: quest.id, }); if (existingQuestCompletions.length > 0) { const existingQuestCompletion = existingQuestCompletions[0]; - const submittedAt = new Date(existingQuestCompletion.submitted_at); + const submittedAt = new Date(existingQuestCompletion.submittedAt); const now = new Date(); const diff = +now - +submittedAt; if (diff < quest.cooldown * 1000) { @@ -67,7 +67,7 @@ export async function createCompletion( const questCompletionInput: Quest_Completion_Insert_Input = { ...questCompletion, - completed_by_player_id: playerId, + completedByPlayerId: playerId, }; const createQuestCompletionResult = await client.CreateQuestCompletion({ objects: questCompletionInput, diff --git a/packages/backend/src/handlers/actions/quests/createCompletion/handler.ts 
b/packages/backend/src/handlers/actions/quests/createCompletion/handler.ts index 412a716fb2..3c44ecfbed 100644 --- a/packages/backend/src/handlers/actions/quests/createCompletion/handler.ts +++ b/packages/backend/src/handlers/actions/quests/createCompletion/handler.ts @@ -1,9 +1,5 @@ import { Request, Response } from 'express'; -import { - CreateQuestCompletionOutput, - Mutation_RootCreateQuestCompletionArgs, -} from '../../../../lib/autogen/hasura-sdk'; import { createCompletion } from './createCompletion'; export const createCompletionHandler = async ( @@ -17,19 +13,16 @@ export const createCompletionHandler = async ( try { if (role !== 'player') { - throw new Error('Expected player role'); + throw new Error(`Expected player role; got ${role}.`); } - const { questCompletion }: Mutation_RootCreateQuestCompletionArgs = input; + const { questCompletion } = input; const result = await createCompletion(playerId, questCompletion); res.json(result); - } catch (e) { - const error = (e as Error).message; - - const errorResponse: CreateQuestCompletionOutput = { + } catch (error) { + res.json({ success: false, - error, - }; - res.json(errorResponse); + error: (error as Error).message, + }); } }; diff --git a/packages/backend/src/handlers/actions/quests/createQuest/createQuest.ts b/packages/backend/src/handlers/actions/quests/createQuest/createQuest.ts index cdaa34d546..54d168de05 100644 --- a/packages/backend/src/handlers/actions/quests/createQuest/createQuest.ts +++ b/packages/backend/src/handlers/actions/quests/createQuest/createQuest.ts @@ -25,7 +25,7 @@ export async function createQuest( } const playerData = await client.GetPlayer({ playerId }); - const ethAddress = playerData.player_by_pk?.ethereum_address; + const ethAddress = playerData.player_by_pk?.ethereumAddress; if (!ethAddress) { throw new Error('Ethereum address not found for player'); } @@ -35,14 +35,14 @@ export async function createQuest( throw new Error('Player not allowed to create quests'); } - const { skills_id: skillsId, ...questValues } = quest; + const { skillsId, ...questValues } = quest; const questInput: Quest_Insert_Input = { ...questValues, repetition: questRepetition, - created_by_player_id: playerId, + createdByPlayerId: playerId, quest_skills: { - data: skillsId.map((s) => ({ skill_id: s })), + data: skillsId.map((s) => ({ skillId: s })), }, }; diff --git a/packages/backend/src/handlers/actions/quests/createQuest/handler.ts b/packages/backend/src/handlers/actions/quests/createQuest/handler.ts index 047edf02ef..46c3395158 100644 --- a/packages/backend/src/handlers/actions/quests/createQuest/handler.ts +++ b/packages/backend/src/handlers/actions/quests/createQuest/handler.ts @@ -1,9 +1,6 @@ import { Request, Response } from 'express'; -import { - CreateQuestOutput, - Mutation_RootCreateQuestArgs, -} from '../../../../lib/autogen/hasura-sdk'; +import { Mutation_RootCreateQuestArgs } from '../../../../lib/autogen/hasura-sdk'; import { createQuest } from './createQuest'; export const createQuestHandler = async ( @@ -23,13 +20,10 @@ export const createQuestHandler = async ( const createQuestArgs: Mutation_RootCreateQuestArgs = input; const result = await createQuest(playerId, createQuestArgs.quest); res.json(result); - } catch (e) { - const error = (e as Error).message; - - const errorResponse: CreateQuestOutput = { + } catch (error) { + res.json({ success: false, - error, - }; - res.json(errorResponse); + error: (error as Error).message, + }); } }; diff --git 
a/packages/backend/src/handlers/actions/quests/updateCompletion/handler.ts b/packages/backend/src/handlers/actions/quests/updateCompletion/handler.ts index 12687e0bd6..05d55ffde6 100644 --- a/packages/backend/src/handlers/actions/quests/updateCompletion/handler.ts +++ b/packages/backend/src/handlers/actions/quests/updateCompletion/handler.ts @@ -1,9 +1,6 @@ import { Request, Response } from 'express'; -import { - Mutation_RootUpdateQuestCompletionArgs, - UpdateQuestCompletionOutput, -} from '../../../../lib/autogen/hasura-sdk'; +import { Mutation_RootUpdateQuestCompletionArgs as QuestCompletionArgs } from '../../../../lib/autogen/hasura-sdk'; import { updateCompletion } from './updateCompletion'; export const updateCompletionHandler = async ( @@ -20,19 +17,16 @@ export const updateCompletionHandler = async ( throw new Error('Expected player role'); } - const updateCompletionArgs: Mutation_RootUpdateQuestCompletionArgs = input; + const updateCompletionArgs: QuestCompletionArgs = input; const result = await updateCompletion( playerId, updateCompletionArgs.updateData, ); res.json(result); - } catch (e) { - const error = (e as Error).message; - - const errorResponse: UpdateQuestCompletionOutput = { + } catch (error) { + res.json({ success: false, - error, - }; - res.json(errorResponse); + error: (error as Error).message, + }); } }; diff --git a/packages/backend/src/handlers/actions/quests/updateCompletion/updateCompletion.ts b/packages/backend/src/handlers/actions/quests/updateCompletion/updateCompletion.ts index a71b7f0edd..3a9c300b6c 100644 --- a/packages/backend/src/handlers/actions/quests/updateCompletion/updateCompletion.ts +++ b/packages/backend/src/handlers/actions/quests/updateCompletion/updateCompletion.ts @@ -15,13 +15,13 @@ export async function updateCompletion( const { quest_completion_by_pk: questCompletion, } = await client.GetQuestCompletionById({ - quest_completion_id: updateData.quest_completion_id, + quest_completion_id: updateData.questCompletionId, }); if (!questCompletion) { throw new Error('Quest completion not found'); } const { quest_by_pk: quest } = await client.GetQuestById({ - quest_id: questCompletion.quest_id, + questId: questCompletion.questId, }); if (!quest) { throw new Error('Quest not found'); @@ -30,7 +30,7 @@ export async function updateCompletion( if (quest.status !== QuestStatus_Enum.Open) { throw new Error('Quest must be open'); } - if (quest.created_by_player_id !== playerId) { + if (quest.createdByPlayerId !== playerId) { throw new Error('Only quest creator can update a completion'); } if (questCompletion.status !== QuestCompletionStatus_Enum.Pending) { @@ -69,7 +69,7 @@ export async function updateCompletion( } await client.RejectOtherQuestCompletions({ accepted_quest_completion_id: questCompletion.id, - quest_id: quest.id, + questId: quest.id, }); } diff --git a/packages/backend/src/handlers/actions/routes.ts b/packages/backend/src/handlers/actions/routes.ts index 9460f75379..85d29cc729 100644 --- a/packages/backend/src/handlers/actions/routes.ts +++ b/packages/backend/src/handlers/actions/routes.ts @@ -1,5 +1,4 @@ import express from 'express'; -import multer from 'multer'; import { asyncHandlerWrapper } from '../../lib/apiHelpers'; import { syncAllGuildDiscordMembers } from '../triggers/syncDiscordGuildMembers'; @@ -7,9 +6,6 @@ import { guildRoutes } from './guild/routes'; import { cacheRoutes } from './idxCache/routes'; import { migrateSourceCredAccounts } from './migrateSourceCredAccounts/handler'; import { questsRoutes } from './quests/routes'; 
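// [Editor's note — illustrative only, not part of the diff] asyncHandlerWrapper is
// imported above from lib/apiHelpers, but its implementation is not shown in this
// changeset. A common shape for such a helper — turning a rejected promise from an
// async Express handler into a call to next(err) — is sketched here as an assumption;
// the name wrapAsyncHandler is hypothetical.
import type { NextFunction, Request, RequestHandler, Response } from 'express';

const wrapAsyncHandler = (
  handler: (req: Request, res: Response) => Promise<unknown>,
): RequestHandler => (req: Request, res: Response, next: NextFunction) => {
  // Any rejection is forwarded to Express error handling instead of being unhandled.
  handler(req, res).catch(next);
};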
-import web3StorageUpload from './storage/handler'; - -const upload = multer({ dest: 'uploads/profile/' }); export const actionRoutes = express.Router(); @@ -27,9 +23,3 @@ actionRoutes.post( actionRoutes.use('/quests', questsRoutes); actionRoutes.use('/guild', guildRoutes); - -const cpUpload = upload.fields([ - { name: 'image', maxCount: 1 }, - { name: 'background', maxCount: 1 }, -]); -actionRoutes.post('/storage', cpUpload, web3StorageUpload); diff --git a/packages/backend/src/handlers/actions/storage/handler.ts b/packages/backend/src/handlers/actions/storage/handler.ts deleted file mode 100644 index c296e9b9d4..0000000000 --- a/packages/backend/src/handlers/actions/storage/handler.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { Request, Response } from 'express'; -import fs from 'fs'; -import Path from 'path'; -import { Web3Storage } from 'web3.storage'; - -import { CONFIG } from '../../../config'; - -export default async (req: Request, res: Response): Promise => { - const storage = new Web3Storage({ token: CONFIG.web3StorageToken }); - const input = req.files as Record>; - - const files = Object.entries(input).map(([key, [{ path }]]) => ({ - name: key, - stream: () => - (fs.createReadStream( - Path.isAbsolute(path) ? path : Path.join(process.cwd(), path), - ) as unknown) as ReadableStream, - })); - - const cid = await storage.put(files); - - const uploadedFiles = Object.fromEntries( - Object.keys(input).map((key: string) => [key, `${cid}/${key}`]), - ); - return res.json(uploadedFiles); -}; diff --git a/packages/backend/src/handlers/auth-webhook/users.ts b/packages/backend/src/handlers/auth-webhook/users.ts index b35a72fdfe..b8fa36c1da 100644 --- a/packages/backend/src/handlers/auth-webhook/users.ts +++ b/packages/backend/src/handlers/auth-webhook/users.ts @@ -1,26 +1,26 @@ import { client } from '../../lib/hasuraClient'; async function createPlayer(ethAddress: string) { - const resProfile = await client.CreatePlayerFromETH({ - ethereum_address: ethAddress, - username: ethAddress, + const { insert_profile: insert } = await client.CreatePlayerFromETH({ + ethereumAddress: ethAddress, }); - if (resProfile.insert_player?.affected_rows !== 1) { + if (insert?.affected_rows !== 1) { throw new Error('Error while creating player'); } - return resProfile.insert_player.returning[0]; + return insert.returning[0]; } export async function getOrCreatePlayer(ethereumAddress: string) { const ethAddress = ethereumAddress.toLowerCase(); const res = await client.GetPlayerFromETH({ - ethereum_address: ethAddress, + ethereumAddress: ethAddress, }); let player = res.player[0]; if (!player) { - player = await createPlayer(ethAddress); + const profile = await createPlayer(ethAddress); + ({ player } = profile); } return player; diff --git a/packages/backend/src/handlers/graphql/mutations.ts b/packages/backend/src/handlers/graphql/mutations.ts index 1e85baba20..f367b0276d 100644 --- a/packages/backend/src/handlers/graphql/mutations.ts +++ b/packages/backend/src/handlers/graphql/mutations.ts @@ -1,16 +1,18 @@ -import { gql } from 'graphql-request/dist'; - // eslint-disable-next-line @typescript-eslint/no-unused-expressions -gql` - mutation CreatePlayerFromETH($ethereum_address: String!, $username: String!) { - insert_player( - objects: { username: $username, ethereum_address: $ethereum_address } +/* GraphQL */ ` + mutation CreatePlayerFromETH($ethereumAddress: String!) 
{ + insert_profile( + objects: [ + { player: { data: { ethereumAddress: $ethereumAddress } } } + ] ) { affected_rows returning { id - username - ethereum_address + player { + id + ethereumAddress + } } } } @@ -19,7 +21,7 @@ gql` $objects: [player_account_insert_input!]! $on_conflict: player_account_on_conflict = { constraint: Account_identifier_type_key - update_columns: [] + update_columns: [playerId] } ) { insert_player_account(objects: $objects, on_conflict: $on_conflict) { @@ -27,24 +29,17 @@ gql` } } - mutation UpsertProfileCache( - $objects: [profile_cache_insert_input!]! - $onConflict: profile_cache_on_conflict = { - constraint: profile_cache_player_id_key - update_columns: [ - name - description - emoji - imageURL - backgroundImageURL - gender - location - country - website - ] - } + mutation UpsertProfile( + $objects: [profile_insert_input!]! + $updateColumns: [profile_update_column!]! ) { - insert_profile_cache(on_conflict: $onConflict, objects: $objects) { + insert_profile( + objects: $objects, + on_conflict: { + constraint: profile_player_id_key + update_columns: $updateColumns + } + ) { affected_rows } } @@ -52,25 +47,27 @@ gql` mutation UpdatePlayer( $ethAddress: String $rank: PlayerRank_enum - $totalXp: numeric - $seasonXp: numeric + $totalXP: numeric + $seasonXP: numeric $discordId: String ) { update_player( - where: { ethereum_address: { _eq: $ethAddress } } + where: { ethereumAddress: { _eq: $ethAddress } } _set: { - ethereum_address: $ethAddress + ethereumAddress: $ethAddress rank: $rank - total_xp: $totalXp - season_xp: $seasonXp - discord_id: $discordId + totalXP: $totalXP + seasonXP: $seasonXP + discordId: $discordId } ) { affected_rows returning { id - ethereum_address - username + ethereumAddress + profile { + username + } } } } @@ -94,14 +91,14 @@ gql` } `; -export const CreateQuestCompletion = gql` +export const CreateQuestCompletion = /* GraphQL */ ` mutation CreateQuestCompletion($objects: [quest_completion_insert_input!]!) { insert_quest_completion(objects: $objects) { affected_rows returning { id - quest_id - completed_by_player_id + questId + completedByPlayerId } } } @@ -129,13 +126,13 @@ export const CreateQuestCompletion = gql` mutation RejectOtherQuestCompletions( $accepted_quest_completion_id: uuid! - $quest_id: uuid! + $questId: uuid! ) { update_quest_completion( where: { _and: [ { id: { _neq: $accepted_quest_completion_id } } - { quest_id: { _eq: $quest_id } } + { questId: { _eq: $questId } } ] } _set: { status: REJECTED } @@ -183,7 +180,7 @@ export const CreateQuestCompletion = gql` $membersToAdd: [guild_player_insert_input!]! ) { delete_guild_player( - where: { Player: { discord_id: { _in: $memberDiscordIdsToRemove } } } + where: { Player: { discordId: { _in: $memberDiscordIdsToRemove } } } ) { affected_rows } diff --git a/packages/backend/src/handlers/graphql/queries.ts b/packages/backend/src/handlers/graphql/queries.ts index 0b50f86bfe..64ee993184 100644 --- a/packages/backend/src/handlers/graphql/queries.ts +++ b/packages/backend/src/handlers/graphql/queries.ts @@ -1,13 +1,13 @@ -import { gql } from 'graphql-request/dist'; - // eslint-disable-next-line @typescript-eslint/no-unused-expressions -gql` +/* GraphQL */ ` query GetPlayer($playerId: uuid!) 
{ player_by_pk(id: $playerId) { id - username - ethereum_address - discord_id + ethereumAddress + discordId + profile { + username + } accounts { identifier type @@ -15,68 +15,68 @@ gql` } } - query GetPlayerFromETH($ethereum_address: String) { - player(where: { ethereum_address: { _eq: $ethereum_address } }) { + query GetPlayerFromETH($ethereumAddress: String) { + player(where: { ethereumAddress: { _eq: $ethereumAddress } }) { id } } query GetPlayersByDiscordId($discordIds: [String!]!) { - player(where: { discord_id: { _in: $discordIds } }) { + player(where: { discordId: { _in: $discordIds } }) { id } } - query GetQuestById($quest_id: uuid!) { - quest_by_pk(id: $quest_id) { + query GetQuestById($questId: uuid!) { + quest_by_pk(id: $questId) { id cooldown status repetition - created_by_player_id + createdByPlayerId } } - query GetQuestCompletions($quest_id: uuid!, $player_id: uuid!) { + query GetQuestCompletions($questId: uuid!, $playerId: uuid!) { quest_completion( where: { - quest_id: { _eq: $quest_id } - completed_by_player_id: { _eq: $player_id } + questId: { _eq: $questId } + completedByPlayerId: { _eq: $playerId } } ) { id - quest_id - completed_by_player_id + questId + completedByPlayerId } } query GetQuestCompletionById($quest_completion_id: uuid!) { quest_completion_by_pk(id: $quest_completion_id) { id - quest_id - completed_by_player_id + questId + completedByPlayerId status } } - query GetLastQuestCompletionForPlayer($quest_id: uuid!, $player_id: uuid!) { + query GetLastQuestCompletionForPlayer($questId: uuid!, $playerId: uuid!) { quest_completion( limit: 1 - order_by: { submitted_at: desc } + order_by: { submittedAt: desc } where: { - quest_id: { _eq: $quest_id } - completed_by_player_id: { _eq: $player_id } + questId: { _eq: $questId } + completedByPlayerId: { _eq: $playerId } } ) { id - quest_id - completed_by_player_id - submitted_at + questId + completedByPlayerId + submittedAt } } `; -export const GuildFragment = gql` +export const GuildFragment = /* GraphQL */ ` fragment GuildFragment on guild { id guildname @@ -94,7 +94,7 @@ export const GuildFragment = gql` `; // eslint-disable-next-line @typescript-eslint/no-unused-expressions -gql` +/* GraphQL */ ` query GetGuild($id: uuid!) { guild(where: { id: { _eq: $id } }) { ...GuildFragment @@ -136,7 +136,7 @@ gql` guild_players { Player { id - discord_id + discordId } } } @@ -150,7 +150,7 @@ gql` ) { Player { id - discord_id + discordId } Guild { id @@ -160,13 +160,13 @@ gql` } `; -export const GetCacheEntries = gql` +export const GetCacheEntries = /* GraphQL */ ` query GetCacheEntries($updatedBefore: timestamptz!) 
{ - profile_cache( + profile( where: { _or: [ - { last_checked_at: { _lt: $updatedBefore } } - { last_checked_at: { _is_null: true } } + { lastCheckedAt: { _lt: $updatedBefore } } + { lastCheckedAt: { _is_null: true } } ] } ) { diff --git a/packages/backend/src/handlers/remote-schemas/resolvers/brightId/resolver.ts b/packages/backend/src/handlers/remote-schemas/resolvers/brightId/resolver.ts index cc04a24966..31d44adca3 100644 --- a/packages/backend/src/handlers/remote-schemas/resolvers/brightId/resolver.ts +++ b/packages/backend/src/handlers/remote-schemas/resolvers/brightId/resolver.ts @@ -1,7 +1,7 @@ -import fetch from 'node-fetch'; +import { fetch } from '@metafam/utils'; import { CONFIG } from '../../../../config'; -import { QueryResolvers } from '../../autogen/types'; +import { BrightIdStatus, QueryResolvers } from '../../autogen/types'; const CONTEXT = 'MetaGame'; @@ -16,7 +16,7 @@ export const getBrightIdStatus: QueryResolvers['getBrightIdStatus'] = async ( try { const response = await fetch(`${ENDPOINT}/${contextId}`); if (!response.ok) return null; - const responseData = await response.json(); + const responseData = (await response.json()) as { data: BrightIdStatus }; return responseData.data; } catch (err) { return null; diff --git a/packages/backend/src/handlers/remote-schemas/resolvers/discord/resolver.ts b/packages/backend/src/handlers/remote-schemas/resolvers/discord/resolver.ts index 356829a2b0..b24a3eb3ec 100644 --- a/packages/backend/src/handlers/remote-schemas/resolvers/discord/resolver.ts +++ b/packages/backend/src/handlers/remote-schemas/resolvers/discord/resolver.ts @@ -10,15 +10,14 @@ export const getGuildDiscordRoles: QueryResolvers['getGuildDiscordRoles'] = asyn if (!guildDiscordId) return []; const discordClient = await createDiscordClient(); - const discordGuild = await discordClient.guilds.fetch(guildDiscordId); if (discordGuild != null) { await discordGuild.roles.fetch(); - return discordGuild.roles.cache.map((role) => ({ - id: role.id, - position: role.position, - name: role.name, + return discordGuild.roles.cache.map(({ id, position, name }) => ({ + id, + position, + name, })); } @@ -36,13 +35,13 @@ export const getDiscordServerMemberRoles: QueryResolvers['getDiscordServerMember const guildDiscordId = getGuildPlayerResponse.guild_player[0].Guild.discord_id; const playerDiscordId = - getGuildPlayerResponse.guild_player[0].Player.discord_id; + getGuildPlayerResponse.guild_player[0].Player.discordId; if (guildDiscordId == null || playerDiscordId == null) return []; const discordClient = await createDiscordClient(); - const discordGuild = await discordClient.guilds.fetch(guildDiscordId); + if (discordGuild != null) { await discordGuild.members.fetch(playerDiscordId); await discordGuild.roles.fetch(); @@ -52,10 +51,10 @@ export const getDiscordServerMemberRoles: QueryResolvers['getDiscordServerMember // these are returned in descending order by position // (meaning, most significant role is first) - return member.roles.cache.map((role) => ({ - id: role.id, - position: role.position, - name: role.name, + return member.roles.cache.map(({ id, position, name }) => ({ + id, + position, + name, })); } diff --git a/packages/backend/src/handlers/remote-schemas/resolvers/getBoxProfile/resolver.ts b/packages/backend/src/handlers/remote-schemas/resolvers/getBoxProfile/resolver.ts deleted file mode 100644 index dfe7bb8a5a..0000000000 --- a/packages/backend/src/handlers/remote-schemas/resolvers/getBoxProfile/resolver.ts +++ /dev/null @@ -1,71 +0,0 @@ -import Box, { 
CollectiblesFavorite, Image } from '3box'; - -import { CONFIG } from '../../../../config'; -import { - optimizeImage, - OptimizeImageParams, -} from '../../../../lib/imageHelpers'; -import { QueryResolvers } from '../../autogen/types'; - -export const getBoxProfile: QueryResolvers['getBoxProfile'] = async ( - _, - { address }, -) => { - if (!address) return null; - - const boxProfile = await Box.getProfile(address); - - if (Object.keys(boxProfile).length === 0) { - return null; - } - - return { - ethereumAddress: address, - name: boxProfile.name, - description: boxProfile.description, - location: boxProfile.location, - job: boxProfile.job, - emoji: boxProfile.emoji, - imageURL: getImage(boxProfile?.image, { - ar: '1:1', - height: 200, - }), - coverImageURL: getImage(boxProfile?.coverPhoto, { - height: 300, - }), - website: boxProfile.website, - collectiblesFavorites: getCollectiblesFavourites( - boxProfile.collectiblesFavorites, - ), - }; -}; - -function getImage( - image: Image[] | null | undefined, - opts: OptimizeImageParams, -) { - const imageHash = image?.[0]?.contentUrl?.['/']; - if (imageHash) { - return optimizeImage(`${CONFIG.ipfsEndpoint}/ipfs/${imageHash}`, opts); - } - return ''; -} - -type CollectibleAddress = { - [network: string]: string; -}; - -function getCollectiblesFavourites( - collectiblesFavorites: Array | null | undefined, -) { - if (!collectiblesFavorites) return []; - return collectiblesFavorites - .map(({ address, token_id }) => ({ - address: - typeof address === 'string' - ? address - : (address as CollectibleAddress).mainnet || null, - tokenId: token_id, - })) - .filter((c) => !!c.address); -} diff --git a/packages/backend/src/handlers/remote-schemas/schema.ts b/packages/backend/src/handlers/remote-schemas/schema.ts index ebf9010689..a8fc79e92a 100644 --- a/packages/backend/src/handlers/remote-schemas/schema.ts +++ b/packages/backend/src/handlers/remote-schemas/schema.ts @@ -6,7 +6,6 @@ import { getDiscordServerMemberRoles, getGuildDiscordRoles, } from './resolvers/discord/resolver'; -import { getBoxProfile } from './resolvers/getBoxProfile/resolver'; import { getTokenBalances, getTopPSeedHolders, @@ -16,7 +15,6 @@ import { uuid } from './types/uuid'; const resolvers = { Query: { - getBoxProfile, getDaoHausMemberships, getBrightIdStatus, getTokenBalances, diff --git a/packages/backend/src/handlers/remote-schemas/typeDefs.ts b/packages/backend/src/handlers/remote-schemas/typeDefs.ts index 3025c7fd8b..4eca94aed3 100644 --- a/packages/backend/src/handlers/remote-schemas/typeDefs.ts +++ b/packages/backend/src/handlers/remote-schemas/typeDefs.ts @@ -4,7 +4,6 @@ export const typeDefs = gql` scalar uuid type Query { - getBoxProfile(address: String): BoxProfile getDaoHausMemberships(memberAddress: String): [Member!]! getBrightIdStatus(contextId: uuid): BrightIdStatus getTokenBalances(address: String): TokenBalances @@ -23,19 +22,6 @@ export const typeDefs = gql` contextIds: [String!]! } - type BoxProfile { - ethereumAddress: String - name: String - description: String - location: String - job: String - emoji: String - imageURL: String - coverImageURL: String - website: String - collectiblesFavorites: [CollectiblesFavorites!] 
- } - type CollectiblesFavorites { address: String tokenId: String diff --git a/packages/backend/src/handlers/triggers/cacheIDXProfile.ts b/packages/backend/src/handlers/triggers/cacheIDXProfile.ts index cbc71bd155..7574ed967a 100644 --- a/packages/backend/src/handlers/triggers/cacheIDXProfile.ts +++ b/packages/backend/src/handlers/triggers/cacheIDXProfile.ts @@ -1,16 +1,17 @@ +import Bottleneck from 'bottleneck'; + import { Player } from '../../lib/autogen/hasura-sdk'; -import updateCachedProfile from '../actions/idxCache/updateSingle'; +import { queueRecache } from '../../lib/cacheHelper'; import { TriggerPayload } from './types'; // This trigger is called when new accounts are created. -// It skips the update queue associated with the normal -// cache invalidation process. -export const cacheIDXProfile = async (payload: TriggerPayload) => { - const address = payload.event.data.new?.ethereum_address; - - if (!address) return; - - const playerId = payload.event.data.new?.id; +export const cacheIDXProfile = async ( + payload: TriggerPayload, + limiter: Bottleneck, +) => { + const { id: playerId } = payload.event.data.new ?? {}; - await updateCachedProfile(playerId); + if (playerId) { + await queueRecache({ playerId, limiter }); + } }; diff --git a/packages/backend/src/handlers/triggers/handler.ts b/packages/backend/src/handlers/triggers/handler.ts index 7156068868..6b20b4cd55 100644 --- a/packages/backend/src/handlers/triggers/handler.ts +++ b/packages/backend/src/handlers/triggers/handler.ts @@ -24,15 +24,15 @@ export const triggerHandler = async ( res: Response, ): Promise => { const role = req.body.event?.session_variables?.['x-hasura-role']; - if (role !== 'admin') { throw new Error('Unauthorized'); } + const { limiter } = req.app.locals; const trigger = TRIGGERS[req.body.trigger.name as keyof typeof TRIGGERS]; if (trigger) { - await trigger(req.body); + await trigger(req.body, limiter); res.sendStatus(200); } else { res.sendStatus(404); diff --git a/packages/backend/src/handlers/triggers/playerRankUpdated.ts b/packages/backend/src/handlers/triggers/playerRankUpdated.ts index e9eb90773e..2f0a25f2c2 100644 --- a/packages/backend/src/handlers/triggers/playerRankUpdated.ts +++ b/packages/backend/src/handlers/triggers/playerRankUpdated.ts @@ -21,7 +21,7 @@ export const playerRankUpdated = async (payload: TriggerPayload) => { const { old: oldPlayer, new: newPlayer } = payload.event.data; console.log( - `updateDiscordRole action triggered for player (username=${newPlayer?.username})`, + `updateDiscordRole action triggered for player (username=${newPlayer?.profile?.username})`, ); try { @@ -30,7 +30,7 @@ export const playerRankUpdated = async (payload: TriggerPayload) => { const getPlayerResponse = await client.GetPlayer({ playerId: newPlayer.id, }); - const playerDiscordId = getPlayerResponse.player_by_pk?.discord_id; + const playerDiscordId = getPlayerResponse.player_by_pk?.discordId; if (playerDiscordId == null) return; const newRank = newPlayer?.rank; @@ -95,7 +95,9 @@ export const playerRankUpdated = async (payload: TriggerPayload) => { } if (removedRole) { - console.log(`${newPlayer?.username}: removed role ${removedRole}`); + console.log( + `${newPlayer?.profile?.username}: removed role ${removedRole}`, + ); } // Add the new rank. 
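
A note on the trigger changes above: cacheIDXProfile no longer calls updateCachedProfile directly; triggerHandler now pulls the shared Bottleneck instance out of req.app.locals and passes it to each trigger, which queues work through queueRecache (added later in this diff as packages/backend/src/lib/cacheHelper.ts). A minimal sketch of that pattern, assuming Express and Bottleneck as imported in these hunks — the route path, payload shape, and recachePlayer helper are illustrative stand-ins, not the repository's actual code:

import Bottleneck from 'bottleneck';
import express from 'express';

// Illustrative stand-in for the real job (updateCachedProfile in this diff).
const recachePlayer = async (playerId: string) => {
  console.info(`recaching profile for player ${playerId}`);
};

const app = express();
app.use(express.json());

// One shared limiter per process, exposed via app.locals as in the diff.
app.locals.limiter = new Bottleneck({ maxConcurrent: 30, minTime: 100 });

app.post('/actions/triggers', async (req, res) => {
  const { limiter } = req.app.locals as { limiter: Bottleneck };
  const playerId: string | undefined = req.body?.event?.data?.new?.id;
  if (!playerId) {
    res.sendStatus(400);
    return;
  }

  // Keying the job by player id lets jobStatus() spot duplicates, so a burst
  // of triggers for the same player only schedules one recache.
  if (limiter.jobStatus(playerId)) {
    console.warn(`Recache for "${playerId}" is already queued.`);
  } else {
    await limiter.schedule({ id: playerId }, () => recachePlayer(playerId));
  }
  res.sendStatus(200);
});

The 'failed' listener that packages/backend/src/index.ts registers on this limiter (later in this diff) then takes care of retrying jobs that die while the Hasura or Ceramic daemons are unreachable.
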
@@ -104,7 +106,7 @@ export const playerRankUpdated = async (payload: TriggerPayload) => { console.warn(`Discord role associated with ${newRank} was not found!`); } else { await discordPlayer.roles.add([discordRoleForRank]); - console.log(`${newPlayer?.username}: added role ${newRank}`); + console.log(`${newPlayer?.profile?.username}: added role ${newRank}`); } } catch (e) { console.error(e); diff --git a/packages/backend/src/handlers/triggers/playerRoleChanged.ts b/packages/backend/src/handlers/triggers/playerRoleChanged.ts index 6c898b4471..b65e682e4f 100644 --- a/packages/backend/src/handlers/triggers/playerRoleChanged.ts +++ b/packages/backend/src/handlers/triggers/playerRoleChanged.ts @@ -23,17 +23,16 @@ export const playerRoleChanged = async ( ) => { if (CONFIG.nodeEnv !== 'production') return; - const { old: oldPlayerRole, new: newPlayerRole } = payload.event.data; + const { old: oldRole, new: newRole } = payload.event.data; - const playerId = newPlayerRole?.player_id || oldPlayerRole?.player_id; + const playerId = newRole?.player_id || oldRole?.player_id; try { - const getPlayerResponse = await client.GetPlayer({ - playerId, - }); - const playerDiscordId = getPlayerResponse.player_by_pk?.discord_id; - const playerUsername = getPlayerResponse.player_by_pk?.username; - if (playerDiscordId == null) return; + const { player_by_pk: playerByPK } = await client.GetPlayer({ playerId }); + const { discordId } = playerByPK ?? {}; + const { username } = playerByPK?.profile ?? {}; + + if (discordId == null) return; // instantiate discord client. We'll need serverId, playerId, and roleIds const discordClient = await createDiscordClient(); @@ -54,31 +53,27 @@ export const playerRoleChanged = async ( getGuildResponse.guild[0]?.metadata?.discord_metadata; const roleIds = metadata.playerRoles as RoleIds; - const discordPlayer = await guild.members.fetch(playerDiscordId); + const discordPlayer = await guild.members.fetch(discordId); if (discordPlayer == null) { console.warn( - `No discord player with ID ${playerDiscordId} found in server ${guild.name}!`, + `No discord player with ID ${discordId} found in server ${guild.name}!`, ); return; } - if (oldPlayerRole != null && newPlayerRole == null) { - const roleId = roleIds[oldPlayerRole.role]; + if (oldRole != null && newRole == null) { + const roleId = roleIds[oldRole.role]; // this throws a typeerror if the player doesn't actually have the role const success = await discordPlayer.roles.remove(roleId); if (success) { - console.debug( - `Removed role ${oldPlayerRole.role} for player ${playerUsername}`, - ); + console.debug(`Removed role ${oldRole.role} for player ${username}`); } - } else if (oldPlayerRole == null && newPlayerRole != null) { - const roleId = roleIds[newPlayerRole.role]; + } else if (oldRole == null && newRole != null) { + const roleId = roleIds[newRole.role]; console.log(roleId); const success = await discordPlayer.roles.add([roleId]); if (success) { - console.debug( - `Added role ${newPlayerRole.role} for player ${playerUsername}`, - ); + console.debug(`Added role ${newRole.role} for player ${username}`); } } } catch (e) { diff --git a/packages/backend/src/handlers/triggers/syncDiscordGuildMembers.ts b/packages/backend/src/handlers/triggers/syncDiscordGuildMembers.ts index 9dc6f4f740..5cbb84ccd0 100644 --- a/packages/backend/src/handlers/triggers/syncDiscordGuildMembers.ts +++ b/packages/backend/src/handlers/triggers/syncDiscordGuildMembers.ts @@ -100,8 +100,8 @@ const syncGuildMembers = async (guild: GuildFragmentFragment) => { id: 
guild.id, }); const guildMemberDiscordIds = getGuildMembersResponse.guild[0].guild_players - .filter((p) => p.Player.discord_id != null) - .map((p) => p.Player.discord_id) as string[]; + .filter((p) => p.Player.discordId != null) + .map((p) => p.Player.discordId) as string[]; await discordGuild.members.fetch(); diff --git a/packages/backend/src/index.ts b/packages/backend/src/index.ts index f6df713086..e0956ef307 100644 --- a/packages/backend/src/index.ts +++ b/packages/backend/src/index.ts @@ -1,30 +1,54 @@ -// import bodyParser from 'body-parser'; -import bodyParser from 'body-parser'; import Bottleneck from 'bottleneck'; import cors from 'cors'; import express from 'express'; import { CONFIG } from './config'; import { router } from './handlers/routes'; + // import { errorMiddleware } from './lib/apiHelpers'; const app = express(); -app.locals.limiter = new Bottleneck({ - maxConcurrent: 10, -}); -// tracks the current contents of Bottleneck -app.locals.queuedRecacheFor = {}; - -app.use(bodyParser.json()); +app.use(express.urlencoded({ extended: true })); +app.use(express.json()); app.use(cors({ credentials: true, origin: true })); app.disable('x-powered-by'); -app.use(cors({ credentials: true, origin: true })); +app.locals.limiter = new Bottleneck({ + maxConcurrent: 30, + minTime: 100, // time to wait between launching jobs (100 = 10 / second) +}); -app.disable('x-powered-by'); +// This isn't getting called on failure. Further debugging is needed. +app.locals.limiter.on( + 'failed', + (error: string, jobInfo: Record) => { + console.warn({ + msg: `JOB FAILED ERROR: "${error}"`, + etime: error.includes('connect ETIMEDOUT'), + eagain: error.includes('getaddrinfo EAI_AGAIN'), + refused: error.includes('connect ECONNREFUSED'), + lt20: (jobInfo.retryCount as number) <= 20, + }); + // The daemon has fallen over, wait for it to come back + if ( + (error.includes('connect ETIMEDOUT') || + error.includes('getaddrinfo EAI_AGAIN') || + error.includes('connect ECONNREFUSED')) && + (jobInfo.retryCount as number) <= 20 // sanity check to avoid clogging queue + ) { + console.warn( + `Job Failed: "${error}". 
Retry #${ + (jobInfo.retryCount as number) + 1 + } in 2s.`, + ); + return 2000; // retry in 2s + } + return null; + }, +); app.use(router); @@ -32,5 +56,5 @@ app.use(router); app.listen(CONFIG.port, () => { // eslint-disable-next-line no-console - console.log(`Listening on port ${CONFIG.port}`); + console.info(`Listening on port ${CONFIG.port}`); }); diff --git a/packages/backend/src/lib/cacheHelper.ts b/packages/backend/src/lib/cacheHelper.ts new file mode 100644 index 0000000000..06641f2705 --- /dev/null +++ b/packages/backend/src/lib/cacheHelper.ts @@ -0,0 +1,38 @@ +import Bottleneck from 'bottleneck'; + +import updateCachedProfile from '../handlers/actions/idxCache/updateSingle'; + +let count = 0; + +export const queueRecache = async ({ + playerId, + limiter, +}: { + playerId: string; + limiter: Bottleneck; +}) => { + if (!playerId) { + throw new Error('No playerId specified to update.'); + } + + const status = limiter.jobStatus(playerId); + if (status) { + console.warn( + `Job for "${playerId}" already exists with status "${status}".`, + ); + } else { + const preRun = count++; + + const result = await limiter.schedule({ id: playerId }, () => + updateCachedProfile(playerId), + ); + console.debug({ + msg: 'Completed Profile Update', + count: preRun, + playerId, + ...result, + }); + } + + return !status; +}; diff --git a/packages/backend/src/lib/colorHelpers.ts b/packages/backend/src/lib/colorHelpers.ts new file mode 100644 index 0000000000..d8092c1f6a --- /dev/null +++ b/packages/backend/src/lib/colorHelpers.ts @@ -0,0 +1,13 @@ +import { Maybe } from '@metafam/utils'; + +export const maskFor = (disposition?: Maybe) => { + if (disposition == null) return null; + + let mask = 0; + if (/w/i.test(disposition)) mask += 0b10000; + if (/u/i.test(disposition)) mask += 0b01000; + if (/b/i.test(disposition)) mask += 0b00100; + if (/r/i.test(disposition)) mask += 0b00010; + if (/g/i.test(disposition)) mask += 0b00001; + return mask; +}; diff --git a/packages/backend/src/lib/imageHelpers.ts b/packages/backend/src/lib/imageHelpers.ts deleted file mode 100644 index 906c7bb16f..0000000000 --- a/packages/backend/src/lib/imageHelpers.ts +++ /dev/null @@ -1,33 +0,0 @@ -import ImgixClient from 'imgix-core-js'; - -import { CONFIG } from '../config'; - -const client = new ImgixClient({ - domain: 'metafam.imgix.net', - secureURLToken: CONFIG.imgixToken, -}); - -export type OptimizeImageParams = { - ar?: string; - fit?: - | 'clamp' - | 'clip' - | 'crop' - | 'facearea' - | 'fill' - | 'fillmax' - | 'max' - | 'min' - | 'scale'; - height?: number; - width?: number; -}; - -export const optimizeImage = ( - url: string, - opts?: OptimizeImageParams, -): string => { - if (!CONFIG.imgixToken) return url; - - return client.buildURL(url, { ...opts }); -}; diff --git a/packages/backend/tsconfig.json b/packages/backend/tsconfig.json index 0d1e4b8e26..d7f1b664dd 100644 --- a/packages/backend/tsconfig.json +++ b/packages/backend/tsconfig.json @@ -1,20 +1,17 @@ { + "$schema": "https://json.schemastore.org/tsconfig", + "display": "MyMeta's Backend", "extends": "../../tsconfig.base.json", "compilerOptions": { - "module": "commonjs", - "rootDir": "src", + "lib": ["ES2021"], + "module": "CommonJS", + "target": "ES2021", "outDir": "dist", - "esModuleInterop": true, + "rootDir": "src", + "composite": true, "tsBuildInfoFile": "dist/.tsbuildinfo" }, - "references": [ - { - "path": "../utils" - }, - { - "path": "../discord-bot" - } - ], - "include": ["./src"], + "references": [{ "path": "../utils" }, { "path": "../discord-bot" }], + 
"include": ["src", "src/**/*.json"], "exclude": ["./tests"] } diff --git a/packages/design-system/package.json b/packages/design-system/package.json index c7b4c685a0..965d0d03ef 100644 --- a/packages/design-system/package.json +++ b/packages/design-system/package.json @@ -19,17 +19,18 @@ "precommit": "yarn lint-staged" }, "peerDependencies": { - "react": ">=16" + "react": ">=17" }, "dependencies": { "@chakra-ui/icons": "1.0.14", "@chakra-ui/react": "1.6.5", "@chakra-ui/theme-tools": "1.3.1", "@emotion/styled": "11.3.0", + "@metafam/utils": "1.0.1", "@types/react": "17.0.6", "@types/react-dom": "17.0.5", "@types/react-select": "3.0.22", - "city-timezones": "^1.2.0", + "city-timezones": "1.2.0", "framer-motion": "4.1.17", "next": "10.2.3", "react-select": "4.3.1", @@ -40,7 +41,7 @@ "devDependencies": { "@babel/core": "7.14.2", "babel-loader": "8.2.2", - "react-dom": "16.14.0", + "react-dom": "17.0.2", "react-google-font-loader": "1.1.0", "typescript": "4.5.4", "tslib": "2.2.0" diff --git a/packages/design-system/src/BoxedNextImage.tsx b/packages/design-system/src/BoxedNextImage.tsx index 61da0e78ed..6aad713158 100644 --- a/packages/design-system/src/BoxedNextImage.tsx +++ b/packages/design-system/src/BoxedNextImage.tsx @@ -5,17 +5,14 @@ import React from 'react'; export const BoxedNextImage: React.FC< | React.ComponentProps | { src: string | number; alt: string | number } -> = (props) => { - const { src, alt } = props; - return ( - - - - ); -}; +> = ({ src, alt, ...props }) => ( + + + +); diff --git a/packages/design-system/src/LoadingState.tsx b/packages/design-system/src/LoadingState.tsx index 0a9d63b194..baa5dae8a2 100644 --- a/packages/design-system/src/LoadingState.tsx +++ b/packages/design-system/src/LoadingState.tsx @@ -1,12 +1,12 @@ -import { Flex, FlexProps, Spinner } from '@chakra-ui/react'; +import { Center, CenterProps, Spinner } from '@chakra-ui/react'; import React from 'react'; export const LoadingState: React.FC< - FlexProps & { + CenterProps & { color?: string; } > = ({ color = 'purple.500', ...props }) => ( - - - +
+ +
); diff --git a/packages/design-system/src/MetaButton.tsx b/packages/design-system/src/MetaButton.tsx index b1c973fcb0..3f918f0e9d 100644 --- a/packages/design-system/src/MetaButton.tsx +++ b/packages/design-system/src/MetaButton.tsx @@ -16,7 +16,7 @@ export const MetaButton: React.FC< fontSize="sm" bg="purple.400" color="white" - ref={ref} + {...{ ref }} {...props} > {children} diff --git a/packages/design-system/src/MetaFilterSelect.tsx b/packages/design-system/src/MetaFilterSelect.tsx index eab104bc1b..a8a8f9b383 100644 --- a/packages/design-system/src/MetaFilterSelect.tsx +++ b/packages/design-system/src/MetaFilterSelect.tsx @@ -11,15 +11,12 @@ import { useBreakpointValue, } from '@chakra-ui/react'; import React, { useCallback, useRef, useState } from 'react'; +import { Props as ReactSelectProps } from 'react-select'; import { DropDownIcon } from './icons/DropDownIcon'; import { MetaTag } from './MetaTag'; import { SelectComponents, SelectSearch } from './SelectSearch'; -import { - getTimezonesFor, - timezonesFilter, - TimezoneType, -} from './SelectTimeZone'; +import { LabeledValue, timeZonesFilter, TimeZoneType } from './SelectTimeZone'; export const MetaSelect: React.FC = (props) => ( ); +>((props, ref) => ( + + -   - - ) : ( - <>{children || innerValue} - )} -
+ useEffect(() => { + Object.entries(player?.profile ?? {}).forEach(([key, value]) => { + if (!key.startsWith('_')) { + setValue(key, value ?? undefined); + } + }); + }, [player, setValue]); + + const onFileChange = useCallback( + ({ target: input }: { target: HTMLInputElement }) => { + const file = input.files?.[0]; + if (!file) return; + const key = input.name as keyof typeof endpoints; + endpoints[key].setLoading(true); + endpoints[key].setFile(file); + const reader = new FileReader(); + reader.addEventListener('load', () => { + endpoints[key].setURL(reader.result as string); + }); + reader.readAsDataURL(file); + }, + [endpoints], ); -}; -export type CountrySelectDropdownProps = { - country: CountryOption; - onChange: (country: CountryOption | null) => void; -}; + if (!ceramic) { + toast({ + title: 'Ceramic Connection Error', + description: 'Unable to connect to the Ceramic API to save changes.', + status: 'error', + isClosable: true, + duration: 8000, + }); + onClose(); + return null; + } -export type CountryOption = { - label: string; - value: string; -}; + const onSubmit = async (inputs: HasuraProfileProps) => { + try { + if (!ceramic.did?.authenticated) { + setStatus(Authenticating DID…); + await ceramic.did?.authenticate(); + } -export const COUNTRIES_LIST: { [key: string]: string } = { - UK: 'United Kingdom', - US: 'USA', - MX: 'Mexico', -}; -export const COUNTRIES_OPTIONS = Object.keys(COUNTRIES_LIST).map( - (key) => ({ value: key, label: COUNTRIES_LIST[key] } as CountryOption), -); -export const CountrySelectDropdown: FC = ({ - country, - onChange, -}) => ( - - - country - - - { - if (value) onChange(value as CountryOption); - }} - /> - - -); + if (params.query.debug) { + // eslint-disable-next-line no-console + console.debug(`For ETH Address: ${address}`); + // eslint-disable-next-line no-console + console.debug(`Connected DID: ${ceramic.did?.id}`); -export type TimezoneOption = { - label: string; - value: string; -}; -export type TimezoneSelectDropdownProps = { - timezone: TimeZoneOption | undefined; - onChange: (timezone: TimezoneOption | null) => void; -}; + const caip10 = await Caip10Link.fromAccount( + ceramic, + `${address}@eip155:1`, + ); + // eslint-disable-next-line no-console + console.debug(`CAIP-10 DID: ${caip10.did}`); + } -export const TIMEZONES_LIST: { [key: string]: string } = { - '-800': 'GMT-800', - '-700': 'GMT-700', - '-600': 'GMT-600', -}; -export const TIMEZONES_OPTIONS = Object.keys(TIMEZONES_LIST).map( - (key) => ({ value: key, label: TIMEZONES_LIST[key] } as TimezoneOption), -); + const cache = new Map(); + const loader = new TileLoader({ ceramic, cache }); + const manager = new ModelManager(ceramic); + manager.addJSONModel(basicProfileModel); + manager.addJSONModel(extendedProfileModel); -export const EditProfileForm: React.FC = ({ - user, - onClose, -}) => { - const [timeZone, setTimeZone] = useState( - user?.player?.timezone || '', - ); - const [availability, setAvailability] = useState( - user?.player?.availability_hours?.toString() || '', - ); - const [username, setUsername] = useState( - user?.player?.username || '', - ); - const [pronouns, setPronouns] = useState( - user?.player?.pronouns || '', - ); + const store = new DIDDataStore({ + ceramic, + loader, + model: await manager.toPublished(), + }); - const [invalid, setInvalid] = useState(false); - const [updateProfileRes, updateProfile] = useUpdateProfileMutation(); - const toast = useToast(); - const [loading, setLoading] = useState(false); - const [, updateUsername] = 
useUpdatePlayerUsernameMutation(); + setStatus( + + Uploading images to + + web3.storage + + … + , + ); - useEffect(() => { - const value = Number(availability); - setInvalid(value < 0 || value > 168); - }, [availability]); + const formData = new FormData(); + const files: Record = {}; + const sources: Record = {}; + const values: ProfileProps = { ...inputs }; + Object.keys(Images).forEach((hasuraId) => { + const key = hasuraId as keyof typeof Images; + if (endpoints[key].file) { + files[key] = endpoints[key].file as File; + } + delete values[key]; + }); - // const GRID_SIZE = 2; - // const HALF = GRID_SIZE / 2; + if (params.query.debug) { + // eslint-disable-next-line no-console + console.debug({ inputs, values, files, endpoints }); + } - const save = async () => { - if (!user) return; + const toType = (key: string) => { + const match = key.match(/^(.+?)(Image)?(URL)$/i); + const [name] = match?.slice(1) ?? ['unknown']; + return name; + }; - setLoading(true); + if (Object.keys(files).length > 0) { + Object.entries(files).forEach(([key, file]) => { + formData.append(toType(key), file); + }); + const result = await fetch(`/api/storage`, { + method: 'POST', + body: formData, + credentials: 'include', + }); + const response = await result.json(); + const { error } = response; + if (result.status >= 400 || error) { + throw new Error( + `web3.storage ${result.status} response: "${ + error ?? result.statusText + }"`, + ); + } - const input: InputData = {}; - if (user.player?.availability_hours?.toString() !== availability) { - input.availability_hours = Number(availability); - } - if (user.player?.timezone !== timeZone) { - input.timezone = timeZone; - } - if (user.player?.pronouns !== pronouns) { - input.pronouns = pronouns; - } + Object.keys(files).forEach((key: string) => { + const tKey = toType(key); + if (!response[tKey]) { + toast({ + title: 'Error Saving Image', + description: `Uploaded "${tKey}" & didn't get a response back.`, + status: 'warning', + isClosable: true, + duration: 8000, + }); + } else { + const { val, ref } = endpoints[key]; + let [, mime] = val?.match(/^data:([^;]+);/) ?? []; + mime ??= 'image/*'; - const profile = await updateProfile({ - playerId: user.id, - input, - }); + const elem = ref.current as HTMLImageElement | null; + const props: { width?: number; height?: number } = {}; + ['width', 'height'].forEach((prop) => { + props[prop as 'width' | 'height'] = Math.max( + elem?.[ + `natural${prop[0].toUpperCase()}${prop.slice(1)}` as + | 'naturalWidth' + | 'naturalHeight' + ] ?? 0, + elem?.[prop as 'width' | 'height'] ?? 0, + 1, + ); + }); + sources[key as keyof typeof Images] = { + original: { + src: `ipfs://${response[tKey]}`, + mimeType: mime, + ...props, + }, + } as ImageSources; + } + }); + } - if (profile.error) { - toast({ - title: 'Error', - description: 'Unable to update profile. 
The octo is sad 😢', - status: 'error', - isClosable: true, + if (params.query.debug) { + // eslint-disable-next-line no-console + console.debug({ files, values, inputs }); + } + + // empty string fails validation + ['residenceCountry', 'birthDate'].forEach((prop) => { + const key = prop as keyof typeof BasicProfileStrings; + if (values[key] === '') { + delete values[key]; + } }); - setLoading(false); - } - if (user.player?.username !== username) { - const usernameResponse = await updateUsername({ - playerId: user.id, - username, + const { countryCode: code }: { countryCode?: string } = values; + if (code?.length === 2) { + values.countryCode = code.toUpperCase(); + } else { + if ((code ?? '').length > 0) { + toast({ + title: 'Country Code Error', + description: `Country Code "${code}" is not the required two letters.`, + status: 'error', + isClosable: true, + duration: 8000, + }); + } + delete values.countryCode; + } + + const basic: BasicProfile = {}; + const extended: ExtendedProfile = {}; + + Object.entries(BasicProfileStrings).forEach(([hasuraId, ceramicId]) => { + const fromKey = ceramicId as Values; + const toKey = hasuraId as keyof typeof BasicProfileStrings; + if (values[toKey] !== undefined) { + basic[fromKey] = (values[toKey] as string) ?? null; + } }); - if (usernameResponse.error) { - let errorDetail = 'The octo is sad 😢'; - if (usernameResponse.error.message.includes('Uniqueness violation')) { - errorDetail = 'This username is already taken 😢'; - } else if ( - usernameResponse.error.message.includes('username_is_valid') - ) { - errorDetail = - 'A username can only contain lowercase letters, numbers, and dashes.'; + Object.entries(BasicProfileImages).forEach(([hasuraId, ceramicId]) => { + const fromKey = ceramicId as Values; + const toKey = hasuraId as keyof typeof BasicProfileImages; + if (sources[toKey] !== undefined) { + basic[fromKey] = sources[toKey] ?? null; } - toast({ - title: 'Error', - description: `Unable to update Player Username. ${errorDetail}`, - status: 'error', - isClosable: true, - }); - setLoading(false); + }); + + setStatus(Updating Basic Profile…); + const basRes = await store.merge('basicProfile', basic); + if (params.query.debug) { + // eslint-disable-next-line no-console + console.info('Basic Profile:', basRes.toUrl()); } - } - onClose(); + Object.entries(ExtendedProfileStrings).forEach( + ([hasuraId, ceramicId]) => { + const fromKey = ceramicId as Values; + const toKey = hasuraId as keyof typeof ExtendedProfileStrings; + if (values[toKey] !== undefined) { + extended[fromKey] = values[toKey]; + } + }, + ); + + Object.entries(ExtendedProfileImages).forEach(([hasuraId, ceramicId]) => { + const fromKey = ceramicId as Values; + const toKey = hasuraId as keyof typeof ExtendedProfileImages; + if (sources[toKey] !== undefined) { + extended[fromKey] = sources[toKey]; + } + }); + + Object.entries(ExtendedProfileObjects).forEach( + ([hasuraId, ceramicId]) => { + const fromKey = ceramicId as Values; + const toKey = hasuraId as keyof typeof ExtendedProfileObjects; + if (values[toKey] !== undefined) { + switch (fromKey) { + case 'availableHours': { + extended[fromKey] = values[toKey] as number; + break; + } + case 'colorDisposition': { + extended[fromKey] = + dispositionFor(values.colorMask) ?? 
undefined; + break; + } + default: { + // eslint-disable-next-line no-console + console.warn(`Unknown Profile Key: "${fromKey}"`); + } + } + } + }, + ); + + if (params.query.debug) { + // eslint-disable-next-line no-console + console.debug({ values, basic, extended }); + } + + setStatus(Updating Extended Profile…); + const extRes = await store.merge('extendedProfile', extended); + if (params.query.debug) { + // eslint-disable-next-line no-console + console.info('Extended Profile:', extRes.toUrl()); + } + + if (player) { + setStatus(Invalidating Cache…); + await invalidateCache({ playerId: player.id }); + } + + // if they changed their username, the page will 404 on reload + if (player && extended.username !== username) { + router.push(`/player/${player.ethereumAddress}`); + } + + onClose(); + } catch (err) { + toast({ + title: 'Ceramic Error', + description: `Error saving profile: ${(err as Error).message}`, + status: 'error', + isClosable: true, + duration: 15000, + }); + } finally { + setStatus(null); + } }; return ( - - - - setUsername(e.target.value || '')} - /> - - - {/* - - */} - - - - setPronouns(e.target.value || '')} - /> - - - {/* - - - */} - - - - availability - - - - - 🕛 - - - + + + + + + + + + { + endpoints.profileImageURL.setLoading(false); + }} + display={ + endpoints.profileImageURL.loading ? 'none' : 'inherit' + } + src={endpoints.profileImageURL.val} + borderRadius="full" + objectFit="cover" + h="full" + w="full" + border="2px solid" + borderColor={ + endpoints.profileImageURL.active + ? 'blue.400' + : 'transparent' + } + /> + {endpoints.profileImageURL.loading && + (endpoints.profileImageURL.val == null ? ( + + ) : ( + + ))} + + ( + { + onChange(evt.target.files); + onFileChange(evt); + }} + minW="100% !important" + minH="100%" + position="absolute" + top={0} + bottom={0} + left={0} + right={0} + opacity={0} + onFocus={() => endpoints.profileImageURL.setActive(true)} + onBlur={() => endpoints.profileImageURL.setActive(false)} + /> + )} + /> + + + {errors.profileImageURL?.message} + + + + {[ + { + key: 'bannerImageURL', + title: 'Header Banner', + description: + 'An image with an ~3:1 aspect ratio to be displayed as a page or profile banner. 1MiB maximum size.', + }, + { + key: 'backgroundImageURL', + title: 'Page Background', + description: + 'An image with an ~1:1 aspect ratio to be the page background. 1MiB maximum size.', + }, + ].map(({ key, title, description: spec }) => ( + + + + + +
+                  {
+                    endpoints[key].setLoading(false);
+                  }}
+                  display={endpoints[key].loading ? 'none' : 'inherit'}
+                  src={endpoints[key].val}
+                  h="full"
+                  w="full"
+                />
+                {endpoints[key].loading &&
+                  (endpoints[key].val == null ? (
+
+                  ) : (
+
+                  ))}
+                (
+                  {
+                    onChange(evt.target.files);
+                    onFileChange(evt);
+                  }}
+                  maxW="100%"
+                  minH="100%"
+                  position="absolute"
+                  top={0}
+                  bottom={0}
+                  left={0}
+                  right={0}
+                  opacity={0}
+                  onFocus={() => endpoints[key].setActive(true)}
+                  onBlur={() => endpoints[key].setActive(false)}
+                />
+              )}
+            />
+            {errors[key]?.message}
+
+
+          ))}
+
+
+
+
+
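
The profile editor above previews each selected file with a FileReader data URL and, on submit, posts the raw files as multipart form data to /api/storage, then rewrites the returned CIDs as ipfs:// image sources before merging them into the Ceramic basicProfile/extendedProfile records. A condensed, browser-side sketch of that round trip (the storeImages name and the simplified ImageSource type are illustrative; the real form also reads naturalWidth/naturalHeight from its preview images, which this sketch replaces with a fixed fallback):

type ImageSource = {
  original: { src: string; mimeType: string; width: number; height: number };
};

// Hypothetical helper mirroring the submit flow above: POST the files to the
// storage action and map each returned CID to an ipfs:// image source.
export const storeImages = async (
  files: Record<string, File>,
): Promise<Record<string, ImageSource>> => {
  const body = new FormData();
  Object.entries(files).forEach(([name, file]) => body.append(name, file));

  const result = await fetch('/api/storage', {
    method: 'POST',
    body,
    credentials: 'include',
  });
  const response: Record<string, string> & { error?: string } =
    await result.json();

  if (!result.ok || response.error) {
    throw new Error(
      `web3.storage upload failed: ${response.error ?? result.statusText}`,
    );
  }

  return Object.fromEntries(
    Object.entries(files).map(([name, file]) => [
      name,
      {
        original: {
          src: `ipfs://${response[name]}`, // CID (or CID/path) returned for this field
          mimeType: file.type || 'image/*',
          width: 1, // the real form measures the preview <img> instead
          height: 1,
        },
      },
    ]),
  );
};

Keeping the upload behind the backend /api/storage action (rather than calling web3.storage from the browser) is what lets the WEB3_STORAGE_TOKEN stay server-side.
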