diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..bf50e3d09 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,3 @@ +* @rot1024 +/server/pkg/builtin/manifest.yml @HideBa +/server/pkg/builtin/manifest_ja.yml @HideBa diff --git a/.github/changelog.yml b/.github/changelog.yml new file mode 100644 index 000000000..910a90c37 --- /dev/null +++ b/.github/changelog.yml @@ -0,0 +1,19 @@ +prefixes: + feat: ๐Ÿš€ Features + fix: ๐Ÿ”ง Bug Fixes + docs: ๐Ÿ“– Documentation + doc: ๐Ÿ“– Documentation + perf: โšก๏ธ Performance + refactor: โœจ Refactor + style: ๐ŸŽจ Styling + test: ๐Ÿงช Testing + chore: Miscellaneous Tasks + build: Miscellaneous Tasks + deps: Miscellaneous Tasks + ci: false + revert: false +scopes: + web: Web + server: Server + "": Misc +titleVersionPrefix: remove diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 000000000..4be5e63df --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,4 @@ +web: + - web/**/* +server: + - server/**/* diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 000000000..bfcf0a18e --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,12 @@ +# Overview + +## What I've done + + +## What I haven't done + +## How I tested + +## Which point I want you to review particularly + +## Memo diff --git a/.github/renovate.json b/.github/renovate.json new file mode 100644 index 000000000..a35fb4239 --- /dev/null +++ b/.github/renovate.json @@ -0,0 +1,58 @@ +{ + "extends": [ + "config:base", + ":semanticCommits", + ":semanticCommitScopeDisabled", + ":maintainLockFilesWeekly", + ":enableVulnerabilityAlertsWithLabel(security)" + ], + "postUpdateOptions": [ + "gomodTidy", + "gomodUpdateImportPaths" + ], + "packageRules": [ + { + "enabledManagers": [ + "gomod" + ], + "matchPackagePatterns": [ + "*" + ], + "groupName": "dependencies", + "groupSlug": "gomod", + "semanticCommitType": "chore", + "schedule": [ + "before 3:00 am on 
the 4th day of the month" + ] + }, + { + "enabledManagers": [ + "dockerfile", + "docker-compose" + ], + "matchPackagePatterns": [ + "*" + ], + "groupName": "docker dependencies", + "groupSlug": "docker", + "semanticCommitType": "chore", + "schedule": [ + "before 3:00 am on the 4th day of the month" + ] + }, + { + "enabledManagers": [ + "github-actions" + ], + "matchPackagePatterns": [ + "*" + ], + "groupName": "github actions dependencies", + "groupSlug": "github-actions", + "semanticCommitType": "ci", + "schedule": [ + "before 3am on the fourth day of the month" + ] + } + ] +} \ No newline at end of file diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 000000000..864dc1a51 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,186 @@ +name: build +on: + workflow_run: + workflows: [ci] + types: [completed] + branches: [main, release] +concurrency: + group: ${{ github.workflow }}-${{ github.event.workflow_run.head_branch }} + cancel-in-progress: true +jobs: + info: + name: Collect information + runs-on: ubuntu-latest + if: github.event.workflow_run.conclusion != 'failure' && github.event.repository.full_name == 'reearth/reearth' && (github.event.workflow_run.head_branch == 'release' || !startsWith(github.event.head_commit.message, 'v')) + outputs: + sha_short: ${{ steps.info.outputs.sha_short }} + tag: ${{ steps.info.outputs.tag }} + tag_short: ${{ steps.info.outputs.tag_short }} + name: ${{ steps.info.outputs.name }} + steps: + - name: checkout + uses: actions/checkout@v3 + with: + ref: ${{ github.event.workflow_run.head_sha }} + - name: Fetch tags + run: git fetch --prune --unshallow --tags + - name: Get info + id: info + env: + BRANCH: ${{ github.event.workflow_run.head_branch }} + # The tag name should be retrieved lazily, as tagging may be delayed. + run: | + echo "::set-output name=sha_short::$(git rev-parse --short HEAD)" + if [[ "$BRANCH" = "release" ]]; then + TAG=$(git tag --points-at HEAD) + if [[ ! 
-z "$TAG" ]]; then + echo "::set-output name=tag::$TAG" + echo "::set-output name=tag_short::${TAG#v}" + else + echo "::set-output name=name::rc" + fi + else + echo "::set-output name=name::nightly" + fi + - name: Show info + env: + SHA_SHORT: ${{ steps.info.outputs.sha_short }} + TAG: ${{ steps.info.outputs.tag }} + TAG_SHORT: ${{ steps.info.outputs.tag_short }} + NAME: ${{ steps.info.outputs.name }} + run: echo "sha_short=$SHA_SHORT, tag=$TAG, tag_short=$TAG_SHORT, name=$NAME" + build: + name: Build and release + runs-on: ubuntu-latest + needs: + - info + if: needs.info.outputs.name || needs.info.outputs.tag + env: + ARTIFACTS: server/dist/reearth_*.*,reearth-web_${{ needs.info.outputs.name }}.tar.gz + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + fetch-depth: 0 + ref: ${{ github.event.workflow_run.head_sha }} + - name: Set up Go + uses: actions/setup-go@v3 + with: + go-version: 1.18 + - name: Fetch reearth-web release + uses: dsaltares/fetch-gh-release-asset@master + with: + repo: reearth/reearth-web + version: tags/${{ needs.info.outputs.name }} + file: reearth-web_${{ needs.info.outputs.name }}.tar.gz + token: ${{ secrets.GITHUB_TOKEN }} + - name: Run GoReleaser + uses: goreleaser/goreleaser-action@v2 + with: + args: release --rm-dist ${{ env.SNAPSHOT }} + workdir: server + env: + SNAPSHOT: ${{ !needs.info.outputs.tag && '--snapshot' || '' }} + GORELEASER_CURRENT_TAG: ${{ needs.info.outputs.tag || '0.0.0' }} + - name: Rename artifacts + if: needs.info.outputs.name + run: for f in $ARTIFACTS; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_${{ needs.info.outputs.name }}/'); done + working-directory: server + - name: List artifacts + run: ls -l server/dist + - name: Release nightly/rc + if: needs.info.outputs.name + uses: ncipollo/release-action@v1 + with: + allowUpdates: true + artifacts: ${{ env.ARTIFACTS }} + commit: ${{ github.sha }} + name: ${{ needs.info.outputs.name }} + tag: ${{ needs.info.outputs.name }} + body: ${{ github.sha 
}} + prerelease: true + - name: Download latest changelog + if: needs.info.outputs.tag + uses: dawidd6/action-download-artifact@v2 + with: + workflow: release.yml + name: changelog-${{ needs.info.outputs.tag }} + - name: Create GitHub release + if: needs.info.outputs.tag + uses: ncipollo/release-action@v1 + with: + artifacts: ${{ env.ARTIFACTS }} + commit: ${{ github.sha }} + name: ${{ needs.info.outputs.tag }} + tag: ${{ needs.info.outputs.tag }} + bodyFile: CHANGELOG_latest.md + docker: + name: Build and push Docker image + runs-on: ubuntu-latest + needs: + - info + if: needs.info.outputs.name || needs.info.outputs.tag + env: + IMAGE_NAME: reearth/reearth + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + ref: ${{ github.event.workflow_run.head_sha }} + - name: Set up QEMU + uses: docker/setup-qemu-action@v2 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: Login to DockerHub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Get options + id: options + env: + TAG: ${{ needs.info.outputs.tag_short }} + NAME: ${{ needs.info.outputs.name }} + SHA: ${{ needs.info.outputs.sha_short }} + run: | + if [[ -n $TAG ]]; then + PLATFORMS=linux/amd64,linux/arm64 + VERSION=$TAG + TAGS=$IMAGE_NAME:$TAG + if [[ ! 
$TAG =~ '-' ]]; then + TAGS+=,${IMAGE_NAME}:${TAG%.*} + TAGS+=,${IMAGE_NAME}:${TAG%%.*} + TAGS+=,${IMAGE_NAME}:latest + fi + else + PLATFORMS=linux/amd64 + VERSION=$SHA + TAGS=$IMAGE_NAME:$NAME + fi + echo "::set-output name=platforms::$PLATFORMS" + echo "::set-output name=version::$VERSION" + echo "::set-output name=tags::$TAGS" + - name: Fetch reearth-web release + uses: dsaltares/fetch-gh-release-asset@master + with: + repo: reearth/reearth-web + version: tags/${{ needs.info.outputs.name }} + file: reearth-web_${{ needs.info.outputs.name }}.tar.gz + token: ${{ secrets.GITHUB_TOKEN }} + - name: Extract reearth-web + run: tar -xvf reearth-web_${{ needs.info.outputs.name }}.tar.gz && mv reearth-web web/dist + - name: Build and push docker image + uses: docker/build-push-action@v3 + with: + platforms: ${{ steps.options.outputs.platforms }} + push: true + build-args: VERSION=${{ steps.options.outputs.version }} + tags: ${{ steps.options.outputs.tags }} + cache-from: type=gha + cache-to: type=gha,mode=max + - name: Invoke deploy_test workflow + uses: benc-uk/workflow-dispatch@v1 + if: needs.info.outputs.name == 'nightly' + with: + workflow: deploy_test + token: ${{ secrets.GPT }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 000000000..3425d022e --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,48 @@ +name: CI +on: + push: + branches: [main, release] + pull_request: +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true +jobs: + ci: + name: CI + runs-on: ubuntu-latest + if: github.event_name != 'push' || !startsWith(github.event.head_commit.message, 'v') + services: + mongo: + image: mongo:4.4-focal + ports: + - 27017:27017 + steps: + - name: set up + uses: actions/setup-go@v3 + with: + go-version: 1.18 + - name: checkout + uses: actions/checkout@v3 + - name: cache + uses: actions/cache@v3 + with: + path: ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + 
restore-keys: | + ${{ runner.os }}-go- + - name: golangci-lint + uses: golangci/golangci-lint-action@v3 + with: + version: v1.45 # v1.46 reports an error + args: --timeout=10m + working-directory: server + - name: test + run: go test ./... -v -race -coverprofile=coverage.txt -covermode=atomic -timeout 10m + env: + REEARTH_DB: mongodb://localhost + working-directory: server + - name: Send coverage report + uses: codecov/codecov-action@v2 + with: + token: ${{ secrets.CODECOV_TOKEN }} + file: coverage.txt diff --git a/.github/workflows/deploy_test.yml b/.github/workflows/deploy_test.yml new file mode 100644 index 000000000..44c48969a --- /dev/null +++ b/.github/workflows/deploy_test.yml @@ -0,0 +1,35 @@ +name: deploy_test +on: + workflow_dispatch: +concurrency: + group: ${{ github.workflow }} + cancel-in-progress: true +env: + IMAGE: reearth/reearth:nightly + IMAGE_GCP: us.gcr.io/reearth-oss/reearth:nightly + GCP_REGION: us-central1 +jobs: + deploy_test: + name: Deploy app to test env + runs-on: ubuntu-latest + if: github.event.repository.full_name == 'reearth/reearth' + steps: + - uses: google-github-actions/auth@v0 + with: + credentials_json: ${{ secrets.GCP_SA_KEY }} + - name: Set up Cloud SDK + uses: google-github-actions/setup-gcloud@v0 + - name: Configure docker + run: gcloud auth configure-docker --quiet + - name: docker push + run: | + docker pull $IMAGE + docker tag $IMAGE $IMAGE_GCP + docker push $IMAGE_GCP + - name: Deploy to Cloud Run + run: | + gcloud run deploy reearth-backend \ + --image $IMAGE_GCP \ + --region $GCP_REGION \ + --platform managed \ + --quiet diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml deleted file mode 100644 index 4bff71585..000000000 --- a/.github/workflows/nightly.yml +++ /dev/null @@ -1,68 +0,0 @@ -name: Nightly -on: - workflow_dispatch: - schedule: - - cron: '0 0 * * *' -env: - IMAGE: reearth/reearth - IMAGE_BASE: reearth/reearth-backend - TAG: nightly -jobs: - prenightly: - runs-on: ubuntu-latest - 
env: - EV: ${{ toJSON(github.event) }} - steps: - - run: echo ${{ github.event.repository.full_name }} - - run: echo $EV - nightly: - name: Nightly - runs-on: ubuntu-latest - if: github.event.repository.full_name == 'reearth/reearth' - steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Fetch reearth-web release - uses: dsaltares/fetch-gh-release-asset@master - with: - repo: reearth/reearth-web - version: tags/nightly - file: reearth-web_nightly.tar.gz - token: ${{ secrets.GITHUB_TOKEN }} - - name: Extract reearth-web - run: tar -xvf reearth-web_nightly.tar.gz && mv reearth-web web - - name: Build and push - id: docker_build - uses: docker/build-push-action@v2 - with: - context: . - platforms: linux/amd64,linux/arm64 - build-args: REEARTH_BACKEND_IMAGE=${{ env.IMAGE_BASE }}:${{ env.TAG }} - push: true - tags: ${{ env.IMAGE }}:${{ env.TAG }} - cache-from: type=gha - cache-to: type=gha,mode=max - - name: Image digest - run: echo ${{ steps.docker_build.outputs.digest }} - slack-notification: - if: github.event.repository.full_name == 'reearth/reearth' && always() - name: Slack Notification - needs: - - nightly - runs-on: ubuntu-latest - steps: - - name: Slack Notification - uses: Gamesight/slack-workflow-status@master - if: always() - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/pr_title.yml b/.github/workflows/pr_title.yml new file mode 100644 index 000000000..cdf267d2e --- /dev/null +++ b/.github/workflows/pr_title.yml @@ -0,0 +1,28 @@ +name: PR Title Checker +on: + pull_request: + types: + - opened + - edited + - synchronize + - labeled + - unlabeled +jobs: + pr_title: + runs-on: 
ubuntu-latest + steps: + - uses: actions/labeler@v2 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + - uses: amannn/action-semantic-pull-request@v4 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ignoreLabels: meta + scopes: | + web + server + subjectPattern: ^(?![A-Z]).+$ + subjectPatternError: | + The subject "{subject}" found in the pull request title "{title}" + didn't match the configured pattern. Please ensure that the subject + doesn't start with an uppercase character. diff --git a/.github/workflows/release.js b/.github/workflows/release.js index 86d41395c..eea04df74 100644 --- a/.github/workflows/release.js +++ b/.github/workflows/release.js @@ -1,47 +1,28 @@ const { readFileSync, writeFileSync } = require("fs"); -const repos = ["web", "backend"]; -const header = "# Changelog\nAll notable changes to this project will be documented in this file."; - -module.exports = async ({ github, tag }) => { - const newTag = removeVFromTag(tag); - const releases = await Promise.all(repos.map(r => github.rest.repos.getReleaseByTag({ +module.exports = async ({ github }) => { + const newTag = removeVFromTag(process.env.TAG); + const release = await github.rest.repos.getReleaseByTag({ owner: "reearth", - repo: "reearth-" + r, + repo: "reearth-web", tag: `v${newTag}`, - }))); + }); + const webChangelogLatest = "### Web\n\n" + release.data.body; - // generate CHANGELOG_latest.md - const changelogLatest = repos.flatMap((r, i) => - [`## reearth-${r}`, releases[i].data.body] - ).join("\n"); - writeFileSync("CHANGELOG_latest.md", changelogLatest); + const changelogLatest = readFileSync("CHANGELOG_latest.md", "utf8"); + const newChangelogLatest = webChangelogLatest + "\n\n" + changelogLatest; + writeFileSync("CHANGELOG_latest.md", newChangelogLatest); - // insert new changelog to CHANGELOG.md - let changelog = ""; - try { - changelog = readFileSync("CHANGELOG.md", "utf-8"); - } catch { - // ignore - } - const pos = changelog.indexOf("## "); // first version section - const 
newChangelog = `${formatHeader(tag)}\n\n${changelogLatest.replace(/^#/gm, "##")}`; - if (pos >= 0) { - changelog = changelog.slice(0, pos) + newChangelog + "\n\n" + changelog.slice(pos); - } else { - changelog = [header, newChangelog].join("\n\n") - } - writeFileSync("CHANGELOG.md", changelog); + const changelog = readFileSync("CHANGELOG.md", "utf8"); + const newChangelog = insert(webChangelogLatest + "\n\n", changelog, changelog.indexOf("### ")); + writeFileSync("CHANGELOG.md", newChangelog); }; -function formatHeader(version, date) { - return `## ${removeVFromTag(version)} - ${formatDate(date)}`; -} - -function formatDate(d = new Date()) { - return `${d.getUTCFullYear()}-${("0" + (d.getUTCMonth() + 1)).slice(-2)}-${("0" + d.getUTCDate()).slice(-2)}`; -} - function removeVFromTag(t) { return t.replace("v", ""); } + +function insert(insert, source, pos) { + if (pos < 0) pos = 0; + return source.slice(0, pos) + insert + source.slice(pos); +} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6adf60925..f4f40a347 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -2,112 +2,58 @@ name: Release on: workflow_dispatch: inputs: - custom_tag: + version: required: false - description: Specify version only when you want to increment the patch and major version (e.g. 
1.1.0) -env: - IMAGE: reearth/reearth - IMAGE_BASE: reearth/reearth-backend + description: 'Next version (NOTE: Switch the branch to "release"!)' + type: choice + default: minor + options: + - patch + - minor + - major jobs: release: name: Release runs-on: ubuntu-latest + if: github.ref == 'refs/heads/release' steps: + - name: git config + env: + GPT_USER: ${{ secrets.GPT_USER }} + run: | + git config --global user.name $GPT_USER + git config --global pull.rebase false - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 token: ${{ secrets.GPT }} - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Bump tag version - id: tag - uses: mathieudutour/github-tag-action@v5.6 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - custom_tag: ${{ github.event.inputs.custom_tag }} - default_bump: minor - dry_run: true - - name: Get Docker tags - id: tags - env: - TAG: ${{ steps.tag.outputs.new_tag }} - run: | - TAG2=${TAG#v} - TAGS=${IMAGE}:${TAG2} - if [[ ! 
$TAG2 =~ '-' ]]; then - TAGS+=,${IMAGE}:${TAG2%.*} - TAGS+=,${IMAGE}:${TAG2%%.*} - TAGS+=,${IMAGE}:latest - fi - echo "::set-output name=new_tag_short::$TAG2" - echo "::set-output name=tags::$TAGS" - - name: Fetch reearth-web release - uses: dsaltares/fetch-gh-release-asset@master + - id: changelog + name: Generate CHANGELOG + uses: reearth/changelog-action@main with: - repo: reearth/reearth-web - version: tags/${{ steps.tag.outputs.new_tag }} - file: reearth-web_${{ steps.tag.outputs.new_tag }}.tar.gz - token: ${{ secrets.GITHUB_TOKEN }} - - name: Extract reearth-web - run: tar -xvf reearth-web_${{ steps.tag.outputs.new_tag }}.tar.gz && mv reearth-web web - - name: Build and push - id: docker_build - uses: docker/build-push-action@v2 + version: ${{ github.event.inputs.version }} + repo: ${{ github.repository }} + latest: CHANGELOG_latest.md + - name: Insert reearth-web changelog + uses: actions/github-script@v6 with: - context: . - platforms: linux/amd64,linux/arm64 - build-args: REEARTH_BACKEND_IMAGE=${{ env.IMAGE_BASE }}:${{ steps.tags.outputs.new_tag_short }} - push: true - tags: ${{ steps.tags.outputs.tags }} - cache-from: type=gha - cache-to: type=gha,mode=max - - name: Image digest - run: echo ${{ steps.docker_build.outputs.digest }} - - name: Generate changelog - uses: actions/github-script@v5 + script: 'require("./.github/workflows/release")({ github, context })' env: - TAG: ${{ steps.tag.outputs.new_tag }} + TAG: ${{ steps.changelog.outputs.version }} + - name: Upload latest CHANGELOG + uses: actions/upload-artifact@v3 with: - script: 'require("./.github/workflows/release")({ github, context, tag: process.env.TAG })' - - name: Commit and push + name: changelog-${{ steps.changelog.outputs.version }} + path: CHANGELOG_latest.md + - name: Commit & push to release env: - TAG: ${{ steps.tag.outputs.new_tag }} + TAG: ${{ steps.changelog.outputs.version }} run: | - git config --global user.name "${{ github.actor }}" - git config --global user.email "${{ 
github.actor }}@users.noreply.github.com" + rm CHANGELOG_latest.md git add CHANGELOG.md - git commit -m $TAG + git commit -am "$TAG" git tag $TAG - git push - git push --tags - - name: Get current SHA - id: sha - run: echo "::set-output name=sha::$(git rev-parse HEAD)" - - name: Create GitHub release - uses: ncipollo/release-action@v1 - with: - commit: ${{ steps.sha.outputs.sha }} - name: ${{ steps.tag.outputs.new_tag }} - tag: ${{ steps.tag.outputs.new_tag }} - bodyFile: CHANGELOG_latest.md - slack-notification: - if: always() - name: Slack Notification - needs: - - release - runs-on: ubuntu-latest - steps: - - name: Slack Notification - uses: Gamesight/slack-workflow-status@master - if: always() - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }} + git push --atomic origin release $TAG + - name: Commit & push to main + run: git switch main && git cherry-pick release && git push diff --git a/.github/workflows/renovate.yml b/.github/workflows/renovate.yml new file mode 100644 index 000000000..0db94b406 --- /dev/null +++ b/.github/workflows/renovate.yml @@ -0,0 +1,21 @@ +name: renovate +on: + push: + branches: + - renovate/* +jobs: + renovate-go-sum-fix: + runs-on: ubuntu-latest + steps: + - name: checkout + uses: actions/checkout@v3 + with: + fetch-depth: 2 + - name: fix + uses: at-wat/go-sum-fix-action@v0 + with: + git_user: ${{ github.actor }} + git_email: ${{ github.actor }}@users.noreply.github.com + github_token: ${{ secrets.GITHUB_TOKEN }} + commit_style: squash + push: force diff --git a/.github/workflows/stage.yml b/.github/workflows/stage.yml new file mode 100644 index 000000000..9fb75349c --- /dev/null +++ b/.github/workflows/stage.yml @@ -0,0 +1,24 @@ +name: Stage +on: + workflow_dispatch: +jobs: + stage: + runs-on: ubuntu-latest + if: github.ref == 'refs/heads/main' + steps: + - name: git config + env: + GPT_USER: ${{ secrets.GPT_USER }} + run: | + git config --global user.name $GPT_USER + git config 
--global pull.rebase false + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + token: ${{ secrets.GPT }} + - name: Checkout release branch + run: git switch release || git switch -c release + - name: Merge main branch to release branch + run: git merge -X theirs main + - name: Git push + run: git push origin release diff --git a/.gitignore b/.gitignore index d106a25bb..1ec8a2074 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,7 @@ +.DS_Store /mongo /data -/web +/server/web +/server/coverage.txt /.env /.env.* diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 000000000..0181837e9 --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,11 @@ +{ + // See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations. + // Extension identifier format: ${publisher}.${name}. Example: vscode.csharp + // List of extensions which should be recommended for users of this workspace. + "recommendations": [ + "golang.go", + "redhat.vscode-yaml" + ], + // List of extensions recommended by VS Code that should not be recommended for users of this workspace. 
+ "unwantedRecommendations": [] +} diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 000000000..3dc0fd9fe --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,15 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Launch server", + "type": "go", + "request": "launch", + "mode": "auto", + "cwd": "${workspaceRoot}/server", + "program": "${workspaceRoot}/server/cmd/reearth", + "env": {}, + "args": [] + } + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 000000000..b041699da --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,31 @@ +{ + "editor.formatOnSave": true, + "go.lintTool": "golangci-lint", + "yaml.format.enable": true, + "yaml.completion": true, + "yaml.validate": true, + "yaml.hover": true, + "yaml.schemas": { + "./server/schemas/plugin_manifest.json": [ + "/server/pkg/builtin/manifest.yml" + ], + "./server/schemas/plugin_manifest_translation.json": [ + "/server/pkg/builtin/manifest_*.yml" + ], + "https://json.schemastore.org/github-workflow.json": ".github/workflows/build.yml" + }, + "json.schemas": [ + { + "fileMatch": [ + "/server/pkg/builtin/manifest.json" + ], + "url": "./server/schemas/plugin_manifest.json" + }, + { + "fileMatch": [ + "/server/pkg/builtin/manifest_*.json" + ], + "url": "./server/schemas/plugin_manifest_translation.json" + } + ] +} diff --git a/CHANGELOG.md b/CHANGELOG.md index 63c9cae9e..aa8aa6af0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,9 +1,11 @@ # Changelog + All notable changes to this project will be documented in this file. ## 0.10.0 - 2022-08-10 ### reearth-web + #### ๐Ÿš€ Features - Add mouse events to plugin API ([#280](https://github.com/reearth/reearth-web/pull/280)) [`9445f0`](https://github.com/reearth/reearth-web/commit/9445f0) @@ -30,7 +32,9 @@ All notable changes to this project will be documented in this file. 
- Upgrade resium to v1.15.0 ([#281](https://github.com/reearth/reearth-web/pull/281)) [`bd3968`](https://github.com/reearth/reearth-web/commit/bd3968) - Cosme changelog [`05084e`](https://github.com/reearth/reearth-web/commit/05084e) - Fix changelog [`48de86`](https://github.com/reearth/reearth-web/commit/48de86) + ### reearth-backend + #### ๐Ÿš€ Features - Configurable server host [`61b03a`](https://github.com/reearth/reearth-backend/commit/61b03a) @@ -43,100 +47,105 @@ All notable changes to this project will be documented in this file. ## 0.9.0 - 2022-07-20 ### reearth-web -#### ๐Ÿš€ Features - -- Plugin API to add layers ([#258](https://github.com/reearth/reearth-web/pull/258)) [`6468e2`](https://github.com/reearth/reearth-web/commit/6468e2) -- Change layer indicators from preset list ([#245](https://github.com/reearth/reearth-web/pull/245)) [`db185e`](https://github.com/reearth/reearth-web/commit/db185e) - -#### ๐Ÿ”ง Bug Fixes - -- Some menu not displayed at sidebar in proejct setting page [`7c0705`](https://github.com/reearth/reearth-web/commit/7c0705) -- Nothing displayed at project setting page when there are many projects [`0a6744`](https://github.com/reearth/reearth-web/commit/0a6744) -- Plugins do not work as expected, update quickjs-emscripten ([#276](https://github.com/reearth/reearth-web/pull/276)) [`9336e6`](https://github.com/reearth/reearth-web/commit/9336e6) -- Plugin editor changes do not take effect until run button is clicked ([#274](https://github.com/reearth/reearth-web/pull/274)) [`39fdb2`](https://github.com/reearth/reearth-web/commit/39fdb2) -- Storytelling widget does not get layers&[#39](https://github.com/reearth/reearth-web/pull/39); title ([#273](https://github.com/reearth/reearth-web/pull/273)) [`5ff72b`](https://github.com/reearth/reearth-web/commit/5ff72b) -- Dataset icon not showing in layer list ([#275](https://github.com/reearth/reearth-web/pull/275)) [`8dbc88`](https://github.com/reearth/reearth-web/commit/8dbc88) -- Show full 
camera values in camera property field popup ([#270](https://github.com/reearth/reearth-web/pull/270)) [`7d3eac`](https://github.com/reearth/reearth-web/commit/7d3eac) -- Plugin dimensions and iframe issues ([#271](https://github.com/reearth/reearth-web/pull/271)) [`f3a52a`](https://github.com/reearth/reearth-web/commit/f3a52a) -- Camera jump not working ([#269](https://github.com/reearth/reearth-web/pull/269)) [`48bbfe`](https://github.com/reearth/reearth-web/commit/48bbfe) -- Layer select state not update properly ([#268](https://github.com/reearth/reearth-web/pull/268)) [`5f7c69`](https://github.com/reearth/reearth-web/commit/5f7c69) -- Unselect layer not work properly ([#266](https://github.com/reearth/reearth-web/pull/266)) [`eb41da`](https://github.com/reearth/reearth-web/commit/eb41da) -- Layer drag and drop does not work with indicators ([#265](https://github.com/reearth/reearth-web/pull/265)) [`12ae04`](https://github.com/reearth/reearth-web/commit/12ae04) -- Testing-library react 18 warnings ([#263](https://github.com/reearth/reearth-web/pull/263)) [`4c9076`](https://github.com/reearth/reearth-web/commit/4c9076) -- Auto fetch more items in dashboard page , project list , dataset page for big screens ([#255](https://github.com/reearth/reearth-web/pull/255)) [`fb8bf9`](https://github.com/reearth/reearth-web/commit/fb8bf9) -- Asset modal flushes when camera limiter is enabled ([#261](https://github.com/reearth/reearth-web/pull/261)) [`204629`](https://github.com/reearth/reearth-web/commit/204629) -- Not being able to override an image from the asset modal ([#260](https://github.com/reearth/reearth-web/pull/260)) [`1d3c3f`](https://github.com/reearth/reearth-web/commit/1d3c3f) -- Layers pane does not update after move layer or create folder ([#259](https://github.com/reearth/reearth-web/pull/259)) [`336d98`](https://github.com/reearth/reearth-web/commit/336d98) -- Cesium flashes on camera change ([#257](https://github.com/reearth/reearth-web/pull/257)) 
[`ad2c0e`](https://github.com/reearth/reearth-web/commit/ad2c0e) -- Router typos ([#252](https://github.com/reearth/reearth-web/pull/252)) [`19fcb6`](https://github.com/reearth/reearth-web/commit/19fcb6) -- Dataset page showing errors on page refreshing ([#253](https://github.com/reearth/reearth-web/pull/253)) [`3f48e9`](https://github.com/reearth/reearth-web/commit/3f48e9) - -#### ๐Ÿงช Testing - -- Fix test coverage target ([#272](https://github.com/reearth/reearth-web/pull/272)) [`b9db10`](https://github.com/reearth/reearth-web/commit/b9db10) - -#### Miscellaneous Tasks - -- Update dependency cesium to ^1.95.0 ([#262](https://github.com/reearth/reearth-web/pull/262)) [`845e2a`](https://github.com/reearth/reearth-web/commit/845e2a) -- Upgrade cesium [`363071`](https://github.com/reearth/reearth-web/commit/363071) -- Upgrade to React 18 and switch to React Router ([#234](https://github.com/reearth/reearth-web/pull/234)) [`b0e8e6`](https://github.com/reearth/reearth-web/commit/b0e8e6) + +#### ๐Ÿš€ Features + +- Plugin API to add layers ([#258](https://github.com/reearth/reearth-web/pull/258)) [`6468e2`](https://github.com/reearth/reearth-web/commit/6468e2) +- Change layer indicators from preset list ([#245](https://github.com/reearth/reearth-web/pull/245)) [`db185e`](https://github.com/reearth/reearth-web/commit/db185e) + +#### ๐Ÿ”ง Bug Fixes + +- Some menu not displayed at sidebar in proejct setting page [`7c0705`](https://github.com/reearth/reearth-web/commit/7c0705) +- Nothing displayed at project setting page when there are many projects [`0a6744`](https://github.com/reearth/reearth-web/commit/0a6744) +- Plugins do not work as expected, update quickjs-emscripten ([#276](https://github.com/reearth/reearth-web/pull/276)) [`9336e6`](https://github.com/reearth/reearth-web/commit/9336e6) +- Plugin editor changes do not take effect until run button is clicked ([#274](https://github.com/reearth/reearth-web/pull/274)) 
[`39fdb2`](https://github.com/reearth/reearth-web/commit/39fdb2) +- Storytelling widget does not get layers&[#39](https://github.com/reearth/reearth-web/pull/39); title ([#273](https://github.com/reearth/reearth-web/pull/273)) [`5ff72b`](https://github.com/reearth/reearth-web/commit/5ff72b) +- Dataset icon not showing in layer list ([#275](https://github.com/reearth/reearth-web/pull/275)) [`8dbc88`](https://github.com/reearth/reearth-web/commit/8dbc88) +- Show full camera values in camera property field popup ([#270](https://github.com/reearth/reearth-web/pull/270)) [`7d3eac`](https://github.com/reearth/reearth-web/commit/7d3eac) +- Plugin dimensions and iframe issues ([#271](https://github.com/reearth/reearth-web/pull/271)) [`f3a52a`](https://github.com/reearth/reearth-web/commit/f3a52a) +- Camera jump not working ([#269](https://github.com/reearth/reearth-web/pull/269)) [`48bbfe`](https://github.com/reearth/reearth-web/commit/48bbfe) +- Layer select state not update properly ([#268](https://github.com/reearth/reearth-web/pull/268)) [`5f7c69`](https://github.com/reearth/reearth-web/commit/5f7c69) +- Unselect layer not work properly ([#266](https://github.com/reearth/reearth-web/pull/266)) [`eb41da`](https://github.com/reearth/reearth-web/commit/eb41da) +- Layer drag and drop does not work with indicators ([#265](https://github.com/reearth/reearth-web/pull/265)) [`12ae04`](https://github.com/reearth/reearth-web/commit/12ae04) +- Testing-library react 18 warnings ([#263](https://github.com/reearth/reearth-web/pull/263)) [`4c9076`](https://github.com/reearth/reearth-web/commit/4c9076) +- Auto fetch more items in dashboard page , project list , dataset page for big screens ([#255](https://github.com/reearth/reearth-web/pull/255)) [`fb8bf9`](https://github.com/reearth/reearth-web/commit/fb8bf9) +- Asset modal flushes when camera limiter is enabled ([#261](https://github.com/reearth/reearth-web/pull/261)) [`204629`](https://github.com/reearth/reearth-web/commit/204629) 
+- Not being able to override an image from the asset modal ([#260](https://github.com/reearth/reearth-web/pull/260)) [`1d3c3f`](https://github.com/reearth/reearth-web/commit/1d3c3f) +- Layers pane does not update after move layer or create folder ([#259](https://github.com/reearth/reearth-web/pull/259)) [`336d98`](https://github.com/reearth/reearth-web/commit/336d98) +- Cesium flashes on camera change ([#257](https://github.com/reearth/reearth-web/pull/257)) [`ad2c0e`](https://github.com/reearth/reearth-web/commit/ad2c0e) +- Router typos ([#252](https://github.com/reearth/reearth-web/pull/252)) [`19fcb6`](https://github.com/reearth/reearth-web/commit/19fcb6) +- Dataset page showing errors on page refreshing ([#253](https://github.com/reearth/reearth-web/pull/253)) [`3f48e9`](https://github.com/reearth/reearth-web/commit/3f48e9) + +#### ๐Ÿงช Testing + +- Fix test coverage target ([#272](https://github.com/reearth/reearth-web/pull/272)) [`b9db10`](https://github.com/reearth/reearth-web/commit/b9db10) + +#### Miscellaneous Tasks + +- Update dependency cesium to ^1.95.0 ([#262](https://github.com/reearth/reearth-web/pull/262)) [`845e2a`](https://github.com/reearth/reearth-web/commit/845e2a) +- Upgrade cesium [`363071`](https://github.com/reearth/reearth-web/commit/363071) +- Upgrade to React 18 and switch to React Router ([#234](https://github.com/reearth/reearth-web/pull/234)) [`b0e8e6`](https://github.com/reearth/reearth-web/commit/b0e8e6) ### reearth-backend -#### ๐Ÿš€ Features - -- Change layer indicators from preset list from backend side ([#158](https://github.com/reearth/reearth-backend/pull/158)) [`0267f1`](https://github.com/reearth/reearth-backend/commit/0267f1) - -#### ๐Ÿ”ง Bug Fixes - -- Property fields in a property list cannot be removed ([#160](https://github.com/reearth/reearth-backend/pull/160)) [`358237`](https://github.com/reearth/reearth-backend/commit/358237) - -#### ๐Ÿงช Testing - -- Unit test for mongo auth request repo 
([#159](https://github.com/reearth/reearth-backend/pull/159)) [`5afc81`](https://github.com/reearth/reearth-backend/commit/5afc81) - -#### Miscellaneous Tasks - -- Update Makefile to remove unused targets [`67780b`](https://github.com/reearth/reearth-backend/commit/67780b) + +#### ๐Ÿš€ Features + +- Change layer indicators from preset list from backend side ([#158](https://github.com/reearth/reearth-backend/pull/158)) [`0267f1`](https://github.com/reearth/reearth-backend/commit/0267f1) + +#### ๐Ÿ”ง Bug Fixes + +- Property fields in a property list cannot be removed ([#160](https://github.com/reearth/reearth-backend/pull/160)) [`358237`](https://github.com/reearth/reearth-backend/commit/358237) + +#### ๐Ÿงช Testing + +- Unit test for mongo auth request repo ([#159](https://github.com/reearth/reearth-backend/pull/159)) [`5afc81`](https://github.com/reearth/reearth-backend/commit/5afc81) + +#### Miscellaneous Tasks + +- Update Makefile to remove unused targets [`67780b`](https://github.com/reearth/reearth-backend/commit/67780b) ## 0.8.0 - 2022-06-17 ### reearth-web -#### ๐Ÿš€ Features - -- Add a basic timeline UI ([#232](https://github.com/reearth/reearth-web/pull/232)) [`fc9732`](https://github.com/reearth/reearth-web/commit/fc9732) -- Add infinite scroll for project lists and datasets in dashboard and setting pages ([#225](https://github.com/reearth/reearth-web/pull/225)) [`28d377`](https://github.com/reearth/reearth-web/commit/28d377) -- Adapt camera field to support 2d mode ([#233](https://github.com/reearth/reearth-web/pull/233)) [`172de5`](https://github.com/reearth/reearth-web/commit/172de5) -- Add scene property overriding to Re:Earth API ([#224](https://github.com/reearth/reearth-web/pull/224)) [`b07603`](https://github.com/reearth/reearth-web/commit/b07603) - -#### ๐Ÿ”ง Bug Fixes - -- Some plugin APIs were missing ([#248](https://github.com/reearth/reearth-web/pull/248)) [`c83262`](https://github.com/reearth/reearth-web/commit/c83262) -- Slight shift when 
capture a new position ([#246](https://github.com/reearth/reearth-web/pull/246)) [`182406`](https://github.com/reearth/reearth-web/commit/182406) -- Dataset counts are displayed incorrectly in dataset pane ([#235](https://github.com/reearth/reearth-web/pull/235)) [`45a0c8`](https://github.com/reearth/reearth-web/commit/45a0c8) -- Labeling hidden by marker symbol ([#238](https://github.com/reearth/reearth-web/pull/238)) [`99c378`](https://github.com/reearth/reearth-web/commit/99c378) -- Vertical position style in infobox image block ([#236](https://github.com/reearth/reearth-web/pull/236)) [`647cf8`](https://github.com/reearth/reearth-web/commit/647cf8) -- Unexpected values for theme and lang props of extension components [`723486`](https://github.com/reearth/reearth-web/commit/723486) -- Wait until all extensions are loaded [`dfe2aa`](https://github.com/reearth/reearth-web/commit/dfe2aa) -- Iframe not correctly sizing to plugin ([#230](https://github.com/reearth/reearth-web/pull/230)) [`500ce8`](https://github.com/reearth/reearth-web/commit/500ce8) -- Plugin API cameramove event is not emitted in published pages ([#227](https://github.com/reearth/reearth-web/pull/227)) [`7a11b3`](https://github.com/reearth/reearth-web/commit/7a11b3) - -#### โœจ Refactor - -- Migrate to react-intl from react-i18next ([#240](https://github.com/reearth/reearth-web/pull/240)) [`404743`](https://github.com/reearth/reearth-web/commit/404743) - -#### ๐Ÿงช Testing - -- Disable util/raf tests that do not always succeed [`45a450`](https://github.com/reearth/reearth-web/commit/45a450) -- Fix unit test for utils/raf [`a060d9`](https://github.com/reearth/reearth-web/commit/a060d9) -- Fix Cypress login test fails ([#241](https://github.com/reearth/reearth-web/pull/241)) [`a5dbfb`](https://github.com/reearth/reearth-web/commit/a5dbfb) - -#### Miscellaneous Tasks - -- Upgrade dependency cesium-dnd to 1.1.0 ([#244](https://github.com/reearth/reearth-web/pull/244)) 
[`ba6b51`](https://github.com/reearth/reearth-web/commit/ba6b51) -- Fix typos [`f98005`](https://github.com/reearth/reearth-web/commit/f98005) + +#### ๐Ÿš€ Features + +- Add a basic timeline UI ([#232](https://github.com/reearth/reearth-web/pull/232)) [`fc9732`](https://github.com/reearth/reearth-web/commit/fc9732) +- Add infinite scroll for project lists and datasets in dashboard and setting pages ([#225](https://github.com/reearth/reearth-web/pull/225)) [`28d377`](https://github.com/reearth/reearth-web/commit/28d377) +- Adapt camera field to support 2d mode ([#233](https://github.com/reearth/reearth-web/pull/233)) [`172de5`](https://github.com/reearth/reearth-web/commit/172de5) +- Add scene property overriding to Re:Earth API ([#224](https://github.com/reearth/reearth-web/pull/224)) [`b07603`](https://github.com/reearth/reearth-web/commit/b07603) + +#### ๐Ÿ”ง Bug Fixes + +- Some plugin APIs were missing ([#248](https://github.com/reearth/reearth-web/pull/248)) [`c83262`](https://github.com/reearth/reearth-web/commit/c83262) +- Slight shift when capture a new position ([#246](https://github.com/reearth/reearth-web/pull/246)) [`182406`](https://github.com/reearth/reearth-web/commit/182406) +- Dataset counts are displayed incorrectly in dataset pane ([#235](https://github.com/reearth/reearth-web/pull/235)) [`45a0c8`](https://github.com/reearth/reearth-web/commit/45a0c8) +- Labeling hidden by marker symbol ([#238](https://github.com/reearth/reearth-web/pull/238)) [`99c378`](https://github.com/reearth/reearth-web/commit/99c378) +- Vertical position style in infobox image block ([#236](https://github.com/reearth/reearth-web/pull/236)) [`647cf8`](https://github.com/reearth/reearth-web/commit/647cf8) +- Unexpected values for theme and lang props of extension components [`723486`](https://github.com/reearth/reearth-web/commit/723486) +- Wait until all extensions are loaded [`dfe2aa`](https://github.com/reearth/reearth-web/commit/dfe2aa) +- Iframe not correctly sizing to 
plugin ([#230](https://github.com/reearth/reearth-web/pull/230)) [`500ce8`](https://github.com/reearth/reearth-web/commit/500ce8) +- Plugin API cameramove event is not emitted in published pages ([#227](https://github.com/reearth/reearth-web/pull/227)) [`7a11b3`](https://github.com/reearth/reearth-web/commit/7a11b3) + +#### โœจ Refactor + +- Migrate to react-intl from react-i18next ([#240](https://github.com/reearth/reearth-web/pull/240)) [`404743`](https://github.com/reearth/reearth-web/commit/404743) + +#### ๐Ÿงช Testing + +- Disable util/raf tests that do not always succeed [`45a450`](https://github.com/reearth/reearth-web/commit/45a450) +- Fix unit test for utils/raf [`a060d9`](https://github.com/reearth/reearth-web/commit/a060d9) +- Fix Cypress login test fails ([#241](https://github.com/reearth/reearth-web/pull/241)) [`a5dbfb`](https://github.com/reearth/reearth-web/commit/a5dbfb) + +#### Miscellaneous Tasks + +- Upgrade dependency cesium-dnd to 1.1.0 ([#244](https://github.com/reearth/reearth-web/pull/244)) [`ba6b51`](https://github.com/reearth/reearth-web/commit/ba6b51) +- Fix typos [`f98005`](https://github.com/reearth/reearth-web/commit/f98005) - Update config so extensionUrls can be declared in .env file for local development ([#237](https://github.com/reearth/reearth-web/pull/237)) [`545b9e`](https://github.com/reearth/reearth-web/commit/545b9e) + ### reearth-backend + #### ๐Ÿš€ Features - Add totalCount field to DatasetSchema type of GraphQL schema ([#154](https://github.com/reearth/reearth-backend/pull/154)) [`ab6334`](https://github.com/reearth/reearth-backend/commit/ab6334) @@ -190,6 +199,7 @@ All notable changes to this project will be documented in this file. 
- Cluster, dataset, infobox, layer, plugin and project gql query files ([#219](https://github.com/reearth/reearth-web/pull/219)) [`e4dae9`](https://github.com/reearth/reearth-web/commit/e4dae9) ### reearth-backend + #### ๐Ÿš€ Features - Add an opacity slider to map tiles ([#138](https://github.com/reearth/reearth-backend/pull/138)) [`4f72b8`](https://github.com/reearth/reearth-backend/commit/4f72b8) @@ -233,6 +243,7 @@ All notable changes to this project will be documented in this file. - Update all dependencies ([#210](https://github.com/reearth/reearth-web/pull/210)) [`c22b7a`](https://github.com/reearth/reearth-web/commit/c22b7a) ### reearth-backend + #### ๐Ÿ”ง Bug Fixes - Renovate bot not running on schedule ([#136](https://github.com/reearth/reearth-backend/pull/136)) [`82843f`](https://github.com/reearth/reearth-backend/commit/82843f) @@ -278,6 +289,7 @@ All notable changes to this project will be documented in this file. - Set default auth config to start app with zero configuration ([#191](https://github.com/reearth/reearth-web/pull/191)) [`d5a2aa`](https://github.com/reearth/reearth-web/commit/d5a2aa) ### reearth-backend + #### ๐Ÿš€ Features - Authentication system ([#108](https://github.com/reearth/reearth-backend/pull/108)) [`b89c32`](https://github.com/reearth/reearth-backend/commit/b89c32) @@ -352,6 +364,7 @@ All notable changes to this project will be documented in this file. - Upgrade dependencies ([#175](https://github.com/reearth/reearth-web/pull/175)) [`dba959`](https://github.com/reearth/reearth-web/commit/dba959) ### reearth-backend + #### ๐Ÿš€ Features - Implement property.Diff and plugin/manifest.Diff ([#107](https://github.com/reearth/reearth-backend/pull/107)) [`700269`](https://github.com/reearth/reearth-backend/commit/700269) @@ -379,29 +392,31 @@ All notable changes to this project will be documented in this file. 
## 0.4.0 - 2022-01-27 ### reearth-web -#### ๐Ÿš€ Features - -- Add "clamp to filed" option to file primitive ([#155](https://github.com/reearth/reearth-web/pull/155)) [`2e83ba`](https://github.com/reearth/reearth-web/commit/2e83ba) -- Infobox padding ([#158](https://github.com/reearth/reearth-web/pull/158)) [`90084f`](https://github.com/reearth/reearth-web/commit/90084f) -- Support tags in plugin API ([#153](https://github.com/reearth/reearth-web/pull/153)) [`1031c5`](https://github.com/reearth/reearth-web/commit/1031c5) - -#### ๐Ÿ”ง Bug Fixes - -- Enable to select blocks of plugins ([#162](https://github.com/reearth/reearth-web/pull/162)) [`458402`](https://github.com/reearth/reearth-web/commit/458402) -- Cesium Ion acces token is not set expectedly ([#160](https://github.com/reearth/reearth-web/pull/160)) [`e8e183`](https://github.com/reearth/reearth-web/commit/e8e183) -- Cluster styling issue ([#161](https://github.com/reearth/reearth-web/pull/161)) [`c78872`](https://github.com/reearth/reearth-web/commit/c78872) -- Clusters and layers are not displayed correctly [`4fc124`](https://github.com/reearth/reearth-web/commit/4fc124) -- Type error [`b01bc7`](https://github.com/reearth/reearth-web/commit/b01bc7) -- The style of infobox block dropdown list is broken ([#163](https://github.com/reearth/reearth-web/pull/163)) [`6e02a9`](https://github.com/reearth/reearth-web/commit/6e02a9) -- Plugin blocks protrude from the infobox [`6cf0d3`](https://github.com/reearth/reearth-web/commit/6cf0d3) - -#### โœจ Refactor - -- Layer clustering feature ([#157](https://github.com/reearth/reearth-web/pull/157)) [`db6e6c`](https://github.com/reearth/reearth-web/commit/db6e6c) -- Camera limiter ([#149](https://github.com/reearth/reearth-web/pull/149)) [`105428`](https://github.com/reearth/reearth-web/commit/105428) -- Layer clustering feature (GraphQL) ([#159](https://github.com/reearth/reearth-web/pull/159)) [`4365b8`](https://github.com/reearth/reearth-web/commit/4365b8) + +#### 
๐Ÿš€ Features + +- Add "clamp to filed" option to file primitive ([#155](https://github.com/reearth/reearth-web/pull/155)) [`2e83ba`](https://github.com/reearth/reearth-web/commit/2e83ba) +- Infobox padding ([#158](https://github.com/reearth/reearth-web/pull/158)) [`90084f`](https://github.com/reearth/reearth-web/commit/90084f) +- Support tags in plugin API ([#153](https://github.com/reearth/reearth-web/pull/153)) [`1031c5`](https://github.com/reearth/reearth-web/commit/1031c5) + +#### ๐Ÿ”ง Bug Fixes + +- Enable to select blocks of plugins ([#162](https://github.com/reearth/reearth-web/pull/162)) [`458402`](https://github.com/reearth/reearth-web/commit/458402) +- Cesium Ion acces token is not set expectedly ([#160](https://github.com/reearth/reearth-web/pull/160)) [`e8e183`](https://github.com/reearth/reearth-web/commit/e8e183) +- Cluster styling issue ([#161](https://github.com/reearth/reearth-web/pull/161)) [`c78872`](https://github.com/reearth/reearth-web/commit/c78872) +- Clusters and layers are not displayed correctly [`4fc124`](https://github.com/reearth/reearth-web/commit/4fc124) +- Type error [`b01bc7`](https://github.com/reearth/reearth-web/commit/b01bc7) +- The style of infobox block dropdown list is broken ([#163](https://github.com/reearth/reearth-web/pull/163)) [`6e02a9`](https://github.com/reearth/reearth-web/commit/6e02a9) +- Plugin blocks protrude from the infobox [`6cf0d3`](https://github.com/reearth/reearth-web/commit/6cf0d3) + +#### โœจ Refactor + +- Layer clustering feature ([#157](https://github.com/reearth/reearth-web/pull/157)) [`db6e6c`](https://github.com/reearth/reearth-web/commit/db6e6c) +- Camera limiter ([#149](https://github.com/reearth/reearth-web/pull/149)) [`105428`](https://github.com/reearth/reearth-web/commit/105428) +- Layer clustering feature (GraphQL) ([#159](https://github.com/reearth/reearth-web/pull/159)) [`4365b8`](https://github.com/reearth/reearth-web/commit/4365b8) ### reearth-backend + #### ๐Ÿš€ Features - Add "clamp 
to ground" option to file primitive ([#95](https://github.com/reearth/reearth-backend/pull/95)) [`559194`](https://github.com/reearth/reearth-backend/commit/559194) @@ -420,67 +435,68 @@ All notable changes to this project will be documented in this file. ## 0.3.0 - 2022-01-11 ### reearth-web - -#### ๐Ÿš€ Features - -- Enhance terrain feature (type selection, exaggeration) ([#138](https://github.com/reearth/reearth-web/pull/138)) [`dae137`](https://github.com/reearth/reearth-web/commit/dae137) -- Clustering layers ([#143](https://github.com/reearth/reearth-web/pull/143)) [`3439cc`](https://github.com/reearth/reearth-web/commit/3439cc) -- Camera limiter ([#142](https://github.com/reearth/reearth-web/pull/142)) [`dec1dd`](https://github.com/reearth/reearth-web/commit/dec1dd) -- Tagging of layers ([#144](https://github.com/reearth/reearth-web/pull/144)) [`4d0a40`](https://github.com/reearth/reearth-web/commit/4d0a40) - -#### ๐Ÿ”ง Bug Fixes - -- Indicator is not displayed on selecting of clustered layer ([#146](https://github.com/reearth/reearth-web/pull/146)) [`e41f67`](https://github.com/reearth/reearth-web/commit/e41f67) -- Use data URL for marker images [`576ea4`](https://github.com/reearth/reearth-web/commit/576ea4) -- Layer clusters do not updated correctly [`ec74f6`](https://github.com/reearth/reearth-web/commit/ec74f6) -- Position label in front of billboard ([#147](https://github.com/reearth/reearth-web/pull/147)) [`81c533`](https://github.com/reearth/reearth-web/commit/81c533) -- Public pages do not work due to clustering feature [`48d8b3`](https://github.com/reearth/reearth-web/commit/48d8b3) -- Photooverlay transition does not work in Android ([#154](https://github.com/reearth/reearth-web/pull/154)) [`decbfe`](https://github.com/reearth/reearth-web/commit/decbfe) - -#### ๐ŸŽจ Styling - -- Fix the height of the header [`9d6acc`](https://github.com/reearth/reearth-web/commit/9d6acc) - -#### Miscellaneous Tasks - -- Upgrade dependencies 
([#134](https://github.com/reearth/reearth-web/pull/134)) [`740821`](https://github.com/reearth/reearth-web/commit/740821) -- Update dependency cesium to ^1.88.0 ([#139](https://github.com/reearth/reearth-web/pull/139)) [`7afdfb`](https://github.com/reearth/reearth-web/commit/7afdfb) -- Fix webpack dev server config [`8d06fa`](https://github.com/reearth/reearth-web/commit/8d06fa) -- Update dependency cesium to ^1.89.0 ([#156](https://github.com/reearth/reearth-web/pull/156)) [`d436ce`](https://github.com/reearth/reearth-web/commit/d436ce) + +#### ๐Ÿš€ Features + +- Enhance terrain feature (type selection, exaggeration) ([#138](https://github.com/reearth/reearth-web/pull/138)) [`dae137`](https://github.com/reearth/reearth-web/commit/dae137) +- Clustering layers ([#143](https://github.com/reearth/reearth-web/pull/143)) [`3439cc`](https://github.com/reearth/reearth-web/commit/3439cc) +- Camera limiter ([#142](https://github.com/reearth/reearth-web/pull/142)) [`dec1dd`](https://github.com/reearth/reearth-web/commit/dec1dd) +- Tagging of layers ([#144](https://github.com/reearth/reearth-web/pull/144)) [`4d0a40`](https://github.com/reearth/reearth-web/commit/4d0a40) + +#### ๐Ÿ”ง Bug Fixes + +- Indicator is not displayed on selecting of clustered layer ([#146](https://github.com/reearth/reearth-web/pull/146)) [`e41f67`](https://github.com/reearth/reearth-web/commit/e41f67) +- Use data URL for marker images [`576ea4`](https://github.com/reearth/reearth-web/commit/576ea4) +- Layer clusters do not updated correctly [`ec74f6`](https://github.com/reearth/reearth-web/commit/ec74f6) +- Position label in front of billboard ([#147](https://github.com/reearth/reearth-web/pull/147)) [`81c533`](https://github.com/reearth/reearth-web/commit/81c533) +- Public pages do not work due to clustering feature [`48d8b3`](https://github.com/reearth/reearth-web/commit/48d8b3) +- Photooverlay transition does not work in Android ([#154](https://github.com/reearth/reearth-web/pull/154)) 
[`decbfe`](https://github.com/reearth/reearth-web/commit/decbfe) + +#### ๐ŸŽจ Styling + +- Fix the height of the header [`9d6acc`](https://github.com/reearth/reearth-web/commit/9d6acc) + +#### Miscellaneous Tasks + +- Upgrade dependencies ([#134](https://github.com/reearth/reearth-web/pull/134)) [`740821`](https://github.com/reearth/reearth-web/commit/740821) +- Update dependency cesium to ^1.88.0 ([#139](https://github.com/reearth/reearth-web/pull/139)) [`7afdfb`](https://github.com/reearth/reearth-web/commit/7afdfb) +- Fix webpack dev server config [`8d06fa`](https://github.com/reearth/reearth-web/commit/8d06fa) +- Update dependency cesium to ^1.89.0 ([#156](https://github.com/reearth/reearth-web/pull/156)) [`d436ce`](https://github.com/reearth/reearth-web/commit/d436ce) ### reearth-backend - -#### ๐Ÿš€ Features - -- Clusters for scenes ([#75](https://github.com/reearth/reearth-backend/pull/75)) [`3512c0`](https://github.com/reearth/reearth-backend/commit/3512c0) -- Add fields of scene property for terrain [`8693b4`](https://github.com/reearth/reearth-backend/commit/8693b4) -- Camera limiter ([#87](https://github.com/reearth/reearth-backend/pull/87)) [`63c582`](https://github.com/reearth/reearth-backend/commit/63c582) - -#### ๐Ÿ”ง Bug Fixes - -- Terrain fields of scene property [`5e3d25`](https://github.com/reearth/reearth-backend/commit/5e3d25) -- Numbers are not decoded from gql to value [`2ddbc8`](https://github.com/reearth/reearth-backend/commit/2ddbc8) -- Layers have their own tags separate from the scene ([#90](https://github.com/reearth/reearth-backend/pull/90)) [`c4fb9a`](https://github.com/reearth/reearth-backend/commit/c4fb9a) -- Return property with clusters data ([#89](https://github.com/reearth/reearth-backend/pull/89)) [`1b99c6`](https://github.com/reearth/reearth-backend/commit/1b99c6) -- Cast values, rename value.OptionalValue ([#93](https://github.com/reearth/reearth-backend/pull/93)) 
[`ba4b18`](https://github.com/reearth/reearth-backend/commit/ba4b18) -- Synchronize mongo migration ([#94](https://github.com/reearth/reearth-backend/pull/94)) [`db4cea`](https://github.com/reearth/reearth-backend/commit/db4cea) - -#### ๐Ÿ“– Documentation - -- Add pkg.go.dev badge to readme [`91f9b3`](https://github.com/reearth/reearth-backend/commit/91f9b3) - -#### โœจ Refactor - -- Make property.Value and dataset.Value independent in pkg/value ([#77](https://github.com/reearth/reearth-backend/pull/77)) [`73143b`](https://github.com/reearth/reearth-backend/commit/73143b) - -#### Miscellaneous Tasks - -- Fix plugin manifest JSON schema [`2b57b1`](https://github.com/reearth/reearth-backend/commit/2b57b1) + +#### ๐Ÿš€ Features + +- Clusters for scenes ([#75](https://github.com/reearth/reearth-backend/pull/75)) [`3512c0`](https://github.com/reearth/reearth-backend/commit/3512c0) +- Add fields of scene property for terrain [`8693b4`](https://github.com/reearth/reearth-backend/commit/8693b4) +- Camera limiter ([#87](https://github.com/reearth/reearth-backend/pull/87)) [`63c582`](https://github.com/reearth/reearth-backend/commit/63c582) + +#### ๐Ÿ”ง Bug Fixes + +- Terrain fields of scene property [`5e3d25`](https://github.com/reearth/reearth-backend/commit/5e3d25) +- Numbers are not decoded from gql to value [`2ddbc8`](https://github.com/reearth/reearth-backend/commit/2ddbc8) +- Layers have their own tags separate from the scene ([#90](https://github.com/reearth/reearth-backend/pull/90)) [`c4fb9a`](https://github.com/reearth/reearth-backend/commit/c4fb9a) +- Return property with clusters data ([#89](https://github.com/reearth/reearth-backend/pull/89)) [`1b99c6`](https://github.com/reearth/reearth-backend/commit/1b99c6) +- Cast values, rename value.OptionalValue ([#93](https://github.com/reearth/reearth-backend/pull/93)) [`ba4b18`](https://github.com/reearth/reearth-backend/commit/ba4b18) +- Synchronize mongo migration 
([#94](https://github.com/reearth/reearth-backend/pull/94)) [`db4cea`](https://github.com/reearth/reearth-backend/commit/db4cea) + +#### ๐Ÿ“– Documentation + +- Add pkg.go.dev badge to readme [`91f9b3`](https://github.com/reearth/reearth-backend/commit/91f9b3) + +#### โœจ Refactor + +- Make property.Value and dataset.Value independent in pkg/value ([#77](https://github.com/reearth/reearth-backend/pull/77)) [`73143b`](https://github.com/reearth/reearth-backend/commit/73143b) + +#### Miscellaneous Tasks + +- Fix plugin manifest JSON schema [`2b57b1`](https://github.com/reearth/reearth-backend/commit/2b57b1) ## 0.2.0 - 2021-11-18 #### reearth-web + #### ๐Ÿš€ Features - Widget align system for mobile ([#115](https://github.com/reearth/reearth-web/pull/115)) [`afa4ba`](https://github.com/reearth/reearth-web/commit/afa4ba) @@ -506,6 +522,7 @@ All notable changes to this project will be documented in this file. ### reearth-backend + #### ๐Ÿš€ Features - Support opentelemetry ([#68](https://github.com/reearth/reearth-backend/pull/68)) [`25c581`](https://github.com/reearth/reearth-backend/commit/25c581) @@ -531,6 +548,7 @@ All notable changes to this project will be documented in this file. ## 0.1.0 - 2021-11-02 ### reearth-web + #### ๐Ÿš€ Features - Support Auth0 audience ([#2](https://github.com/reearth/reearth-web/pull/2)) [`0ad0f6`](https://github.com/reearth/reearth-web/commit/0ad0f6) @@ -667,6 +685,7 @@ All notable changes to this project will be documented in this file. ### reearth-backend + #### ๐Ÿš€ Features - Support Auth0 audience ([#3](https://github.com/reearth/reearth-backend/pull/3)) [`c3758e`](https://github.com/reearth/reearth-backend/commit/c3758e) @@ -770,4 +789,3 @@ All notable changes to this project will be documented in this file. 
- Fix release workflow, fix build comment [skip ci] [`cfc79a`](https://github.com/reearth/reearth-backend/commit/cfc79a) - Fix renaming file names in release workflow [`96f0b3`](https://github.com/reearth/reearth-backend/commit/96f0b3) - Fix and refactor release workflow [skip ci] [`d5466b`](https://github.com/reearth/reearth-backend/commit/d5466b) - diff --git a/Dockerfile b/Dockerfile index cb36a256d..d7c6b1ad1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,26 @@ -ARG REEARTH_BACKEND_IMAGE=reearth/reearth-backend:latest -FROM $REEARTH_BACKEND_IMAGE +FROM golang:1.18-alpine AS build +ARG TAG=release +ARG REV +ARG VERSION -COPY web /reearth/web +RUN apk add --update --no-cache git ca-certificates build-base +COPY server/go.mod server/go.sum server/main.go /reearth/ WORKDIR /reearth +RUN go mod download + +COPY server/cmd/ /reearth/cmd/ +COPY server/pkg/ /reearth/pkg/ +COPY server/internal/ /reearth/internal/ + +RUN CGO_ENABLED=0 go build -tags "${TAG}" "-ldflags=-X main.version=${VERSION} -s -w -buildid=" -trimpath ./cmd/reearth + +FROM scratch + +COPY --from=build /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt +COPY --from=build /reearth/reearth /reearth/reearth +COPY web/dist* /reearth/web/ + +WORKDIR /reearth + CMD [ "./reearth" ] diff --git a/server/.env.example b/server/.env.example new file mode 100644 index 000000000..b4b236ff6 --- /dev/null +++ b/server/.env.example @@ -0,0 +1,52 @@ +# General +PORT=8080 +REEARTH_DB=mongodb://localhost +REEARTH_HOST=https://localhost:8080 +REEARTH_HOST_WEB=https://localhost:3000 +REEARTH_DEV=false + +# GCP +GOOGLE_CLOUD_PROJECT= +GCS_BUCKETNAME= +GCS_PUBLICATIONCACHECONTROL= + +# Local Auth serv +REEARTH_AUTH0_DOMAIN=https://example.auth0.com +REEARTH_AUTH0_AUDIENCE=https://api.reearth.example.com +REEARTH_AUTH0_CLIENTID= +REEARTH_AUTH0_CLIENTSECRET= +REEARTH_AUTH0_WEBCLIENTID= + +# Auth client +#REEARTH_AUTH_ISS=https://hoge.com +#REEARTH_AUTH_AUD=https://api.reearth.example.com +# If you want 
to use multiple auth servers +#REEARTH_AUTH=[{"ISS":"https://hoge.com","AUD":["https://api.reearth.example.com"]}] + +# Auth server +# If you want to restrict signups, set secret +REEARTH_SIGNUP_SECRET= +# If you want to run auth server on localhost, set to true +REEARTH_AUTHSRV_DEV=true +REEARTH_AUTHSRV_DISABLED=false +REEARTH_AUTHSRV_UIDOMAIN=https://reearth.example.com +REEARTH_AUTHSRV_DOMAIN=https://api.reearth.example.com +# Any random long string (keep it secret) +REEARTH_AUTHSRV_KEY=abcdefghijklmnopqrstuvwxyz + +# Available mailers: [log, smtp, sendgrid] +REEARTH_MAILER=log + +# SendGrid config +#REEARTH_MAILER=sendgrid +#REEARTH_SENDGRID_EMAIL=noreplay@test.com +#REEARTH_SENDGRID_NAME= +#REEARTH_SENDGRID_API= + +# SMTP config +#REEARTH_MAILER=smtp +#REEARTH_SMTP_EMAIL=noreplay@test.com +#REEARTH_SMTP_HOST=smtp.sendgrid.net +#REEARTH_SMTP_PORT=587 +#REEARTH_SMTP_SMTPUSERNAME=apikey +#REEARTH_SMTP_PASSWORD=Your_SendGrid_Token diff --git a/server/.gitignore b/server/.gitignore new file mode 100644 index 000000000..de2f5f191 --- /dev/null +++ b/server/.gitignore @@ -0,0 +1,30 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib +.DS_Store + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Editor +.idea + +# reearth +/dist +/reearth +/reearth-backend +__debug_bin +/data +/bin +/debug +/mongo +/.env* +!/.env.example +/coverage.txt +/web diff --git a/server/.golangci.yml b/server/.golangci.yml new file mode 100644 index 000000000..63eab2e25 --- /dev/null +++ b/server/.golangci.yml @@ -0,0 +1,10 @@ +issues: + exclude-use-default: false + +linters: + enable: + - gofmt + - goimports + +goimports: + local-prefixes: github.com/reearth/reearth-backend diff --git a/server/.goreleaser.yml b/server/.goreleaser.yml new file mode 100644 index 000000000..8cd9e5cbc --- /dev/null +++ b/server/.goreleaser.yml @@ -0,0 +1,30 @@ +project_name: reearth +before: + hooks: + - go 
mod tidy +builds: + - main: ./cmd/reearth + flags: + - -tags=release + - -trimpath + ldflags: + - -s -w + - -X main.version={{.Version}} + - -buildid= + env: + - CGO_ENABLED=0 +archives: + - name_template: "{{ .ProjectName }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}" + replacements: + darwin: darwin + linux: linux + windows: windows + 386: i386 + amd64: x86_64 + format_overrides: + - goos: windows + format: zip +changelog: + skip: true +release: + disable: true diff --git a/server/.graphqlconfig b/server/.graphqlconfig new file mode 100644 index 000000000..5c5a31cea --- /dev/null +++ b/server/.graphqlconfig @@ -0,0 +1,3 @@ +{ + "schemaPath": "schema.graphql" +} diff --git a/server/Makefile b/server/Makefile new file mode 100644 index 000000000..0b0232bcb --- /dev/null +++ b/server/Makefile @@ -0,0 +1,19 @@ +lint: + golangci-lint run --fix + +test: + go test -race -v ./... + +build: + go build ./cmd/reearth + +run-app: + go run ./cmd/reearth + +run-db: + docker compose up -f ../docker-compose.yml -d reearth-mongo + +gql: + go generate ./internal/adapter/gql + +.PHONY: lint test build run-app run-db gql diff --git a/server/README.md b/server/README.md new file mode 100644 index 000000000..e69de29bb diff --git a/server/cmd/reearth/debug.go b/server/cmd/reearth/debug.go new file mode 100644 index 000000000..57897e39e --- /dev/null +++ b/server/cmd/reearth/debug.go @@ -0,0 +1,5 @@ +//go:build !release + +package main + +const debug = true diff --git a/server/cmd/reearth/main.go b/server/cmd/reearth/main.go new file mode 100644 index 000000000..7246bdfe8 --- /dev/null +++ b/server/cmd/reearth/main.go @@ -0,0 +1,9 @@ +package main + +import "github.com/reearth/reearth-backend/internal/app" + +var version = "" + +func main() { + app.Start(debug, version) +} diff --git a/server/cmd/reearth/release.go b/server/cmd/reearth/release.go new file mode 100644 index 000000000..9ac0438f8 --- /dev/null +++ b/server/cmd/reearth/release.go @@ -0,0 +1,5 @@ 
+//go:build release + +package main + +const debug = false diff --git a/server/codecov.yml b/server/codecov.yml new file mode 100644 index 000000000..2960b1ec7 --- /dev/null +++ b/server/codecov.yml @@ -0,0 +1,13 @@ +comment: + layout: 'reach, diff, flags, files' + behavior: default + require_changes: false +ignore: + - "**/*_gen.go" + - "**/*_test.go" + - "**/doc.go" + - "**/testdata" + - internal/adapter/gql/generated.go + - tools + - main.go + - tools.go diff --git a/server/go.mod b/server/go.mod new file mode 100644 index 000000000..52897d13f --- /dev/null +++ b/server/go.mod @@ -0,0 +1,130 @@ +module github.com/reearth/reearth-backend + +require ( + cloud.google.com/go/profiler v0.3.0 + cloud.google.com/go/storage v1.22.1 + github.com/99designs/gqlgen v0.17.5 + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.7.0 + github.com/auth0/go-jwt-middleware/v2 v2.0.1 + github.com/avast/retry-go/v4 v4.0.4 + github.com/blang/semver v3.5.1+incompatible + github.com/caos/oidc v1.2.0 + github.com/goccy/go-yaml v1.9.5 + github.com/golang/gddo v0.0.0-20210115222349-20d68f94ee1f + github.com/google/uuid v1.3.0 + github.com/gorilla/mux v1.8.0 + github.com/iancoleman/strcase v0.2.0 + github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d + github.com/jarcoal/httpmock v1.2.0 + github.com/joho/godotenv v1.4.0 + github.com/jonas-p/go-shp v0.1.1 + github.com/kelseyhightower/envconfig v1.4.0 + github.com/kennygrant/sanitize v1.2.4 + github.com/labstack/echo/v4 v4.7.2 + github.com/labstack/gommon v0.3.1 + github.com/mitchellh/mapstructure v1.5.0 + github.com/oklog/ulid v1.3.1 + github.com/paulmach/go.geojson v1.4.0 + github.com/pkg/errors v0.9.1 + github.com/ravilushqa/otelgqlgen v0.6.1 + github.com/samber/lo v1.21.0 + github.com/sendgrid/sendgrid-go v3.11.1+incompatible + github.com/sirupsen/logrus v1.8.1 + github.com/spf13/afero v1.8.2 + github.com/square/mongo-lock v0.0.0-20201208161834-4db518ed7fb2 + github.com/stretchr/testify v1.7.1 + 
github.com/twpayne/go-kml v1.5.2 + github.com/uber/jaeger-client-go v2.30.0+incompatible + github.com/uber/jaeger-lib v2.4.1+incompatible + github.com/vektah/dataloaden v0.3.0 + github.com/vektah/gqlparser/v2 v2.4.2 + go.mongodb.org/mongo-driver v1.9.1 + go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.32.0 + go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.32.0 + go.opentelemetry.io/otel v1.7.0 + go.opentelemetry.io/otel/sdk v1.7.0 + golang.org/x/crypto v0.0.0-20220518034528-6f7dac969898 + golang.org/x/exp v0.0.0-20220518171630-0b5c67f07fdf + golang.org/x/text v0.3.7 + golang.org/x/tools v0.1.10 + google.golang.org/api v0.80.0 + gopkg.in/go-playground/colors.v1 v1.2.0 + gopkg.in/h2non/gock.v1 v1.1.2 + gopkg.in/square/go-jose.v2 v2.6.0 +) + +require ( + cloud.google.com/go v0.100.2 // indirect + cloud.google.com/go/compute v1.6.1 // indirect + cloud.google.com/go/iam v0.3.0 // indirect + cloud.google.com/go/trace v1.2.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.31.0 // indirect + github.com/HdrHistogram/hdrhistogram-go v1.0.1 // indirect + github.com/agnivade/levenshtein v1.1.1 // indirect + github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect + github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a // indirect + github.com/caos/logging v0.0.2 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b // indirect + github.com/fatih/color v1.12.0 // indirect + github.com/felixge/httpsnoop v1.0.2 // indirect + github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 // indirect + github.com/go-logr/logr v1.2.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-stack/stack v1.8.0 // indirect + github.com/golang-jwt/jwt v3.2.2+incompatible // indirect + 
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/protobuf v1.5.2 // indirect + github.com/golang/snappy v0.0.3 // indirect + github.com/google/go-cmp v0.5.8 // indirect + github.com/google/pprof v0.0.0-20220412212628-83db2b799d1f // indirect + github.com/googleapis/gax-go/v2 v2.4.0 // indirect + github.com/googleapis/go-type-adapters v1.0.0 // indirect + github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 // indirect + github.com/gorilla/handlers v1.5.1 // indirect + github.com/gorilla/schema v1.2.0 // indirect + github.com/gorilla/securecookie v1.1.1 // indirect + github.com/gorilla/websocket v1.5.0 // indirect + github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect + github.com/hashicorp/golang-lru v0.5.4 // indirect + github.com/klauspost/compress v1.13.6 // indirect + github.com/matryer/moq v0.2.7 // indirect + github.com/mattn/go-colorable v0.1.12 // indirect + github.com/mattn/go-isatty v0.0.14 // indirect + github.com/opentracing/opentracing-go v1.2.0 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/russross/blackfriday/v2 v2.1.0 // indirect + github.com/sendgrid/rest v2.6.6+incompatible // indirect + github.com/smartystreets/assertions v1.1.1 // indirect + github.com/smartystreets/goconvey v1.6.4 // indirect + github.com/stretchr/objx v0.2.0 // indirect + github.com/tidwall/pretty v1.0.1 // indirect + github.com/urfave/cli/v2 v2.4.0 // indirect + github.com/valyala/bytebufferpool v1.0.0 // indirect + github.com/valyala/fasttemplate v1.2.1 // indirect + github.com/xdg-go/pbkdf2 v1.0.0 // indirect + github.com/xdg-go/scram v1.0.2 // indirect + github.com/xdg-go/stringprep v1.0.2 // indirect + github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect + go.opencensus.io v0.23.0 // indirect + go.opentelemetry.io/contrib v1.6.0 // indirect + go.opentelemetry.io/otel/trace v1.7.0 // indirect + go.uber.org/atomic v1.7.0 // indirect + golang.org/x/mod 
v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 // indirect + golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4 // indirect + golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect + golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect + golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6 // indirect + golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect + golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f // indirect + google.golang.org/appengine v1.6.7 // indirect + google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335 // indirect + google.golang.org/grpc v1.46.0 // indirect + google.golang.org/protobuf v1.28.0 // indirect + gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect +) + +go 1.18 diff --git a/server/go.sum b/server/go.sum new file mode 100644 index 000000000..d88e0b90b --- /dev/null +++ b/server/go.sum @@ -0,0 +1,1051 @@ +cloud.google.com/go v0.16.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= 
+cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= +cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= +cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= +cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= +cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= +cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= +cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= +cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= +cloud.google.com/go v0.100.2 h1:t9Iw5QH5v4XtlEQaCtUY7x6sCABps8sW0acw7e2WQ6Y= +cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod 
h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= +cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= +cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= +cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s= +cloud.google.com/go/compute v1.6.1 h1:2sMmt8prCn7DPaG4Pmh0N3Inmc8cT8ae5k1M6VJ9Wqc= +cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/iam v0.3.0 h1:exkAomrVUuzx9kWFI1wm3KI0uoDeUFPB4kKGzx6x+Gc= +cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= +cloud.google.com/go/profiler v0.3.0 h1:R6y/xAeifaUXxd2x6w+jIwKxoKl8Cv5HJvcvASTPWJo= +cloud.google.com/go/profiler v0.3.0/go.mod h1:9wYk9eY4iZHsev8TQb61kh3wiOiSyz/xOYixWPzweCU= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod 
h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= +cloud.google.com/go/storage v1.22.1 h1:F6IlQJZrZM++apn9V5/VfS3gbTUYg98PS3EMQAzqtfg= +cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= +cloud.google.com/go/trace v1.2.0 h1:oIaB4KahkIUOpLSAAjEJ8y2desbjY/x/RfP4O3KAtTI= +cloud.google.com/go/trace v1.2.0/go.mod h1:Wc8y/uYyOhPy12KEnXG9XGrvfMz5F5SrYecQlbW1rwM= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/99designs/gqlgen v0.17.5 h1:bTgv7FQz3+NROg6ooHtlkaJ82Uqrp6e5sAziXTBo1hc= +github.com/99designs/gqlgen v0.17.5/go.mod h1:SNpLVzaF37rRLSAXtu8FKVp5I4zycneMmFX6NT4XGSU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.7.0 h1:8vpIORQCKkwM0r/IZ1faAddG56t7byhqSxATphc+8MI= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.7.0/go.mod h1:HuFNmMWVYJDj2IxyIlUOW2vguRBM8ct9mOuAtWRU2EQ= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.31.0 h1:tfaeStvrph8eJEmo1iji3A4DXen3s6ZMM17nQmvo0WA= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.31.0/go.mod h1:j+FS9VBW3mwtHBmm9KOJEy5Tq68fCp7fE/R9bV/flIM= +github.com/HdrHistogram/hdrhistogram-go v1.0.1 h1:GX8GAYDuhlFQnI2fRDHQhTlkHMz8bEn0jTI6LJU0mpw= +github.com/HdrHistogram/hdrhistogram-go v1.0.1/go.mod h1:BWJ+nMSHY3L41Zj7CA3uXnloDp7xxV0YvstAE7nKTaM= +github.com/Masterminds/goutils v1.1.0/go.mod 
h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= +github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= +github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= +github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM= +github.com/agnivade/levenshtein v1.1.1 h1:QY8M92nrzkmr798gCo3kmMyqXFzdQVpxLlGPRBij0P8= +github.com/agnivade/levenshtein v1.1.1/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a h1:E/8AP5dFtMhl5KPJz66Kt9G0n+7Sn41Fy1wv9/jHOrc= +github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE= +github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ= +github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q= +github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= +github.com/auth0/go-jwt-middleware/v2 v2.0.1 h1:zAgDKL7nsfVBFl31GGxsSXkhuRzYe1fVtJcO3aMSrFU= +github.com/auth0/go-jwt-middleware/v2 v2.0.1/go.mod h1:kDt7JgUuDEp1VutfUmO4ZxBLL51vlNu/56oDfXc5E0Y= +github.com/avast/retry-go/v4 v4.0.4 h1:38hLf0DsRXh+hOF6HbTni0+5QGTNdw9zbaMD7KAO830= +github.com/avast/retry-go/v4 v4.0.4/go.mod 
h1:HqmLvS2VLdStPCGDFjSuZ9pzlTqVRldCI4w2dO4m1Ms= +github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48= +github.com/aws/aws-sdk-go v1.35.5/go.mod h1:tlPOdRjfxPBpNIwqDj61rmsnA85v9jc0Ps9+muhnW+k= +github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ= +github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= +github.com/bradfitz/gomemcache v0.0.0-20170208213004-1952afaa557d/go.mod h1:PmM6Mmwb0LSuEubjR8N7PtNe1KxZLtOUHtbeikc5h60= +github.com/caos/logging v0.0.2 h1:ebg5C/HN0ludYR+WkvnFjwSExF4wvyiWPyWGcKMYsoo= +github.com/caos/logging v0.0.2/go.mod h1:9LKiDE2ChuGv6CHYif/kiugrfEXu9AwDiFWSreX7Wp0= +github.com/caos/oidc v1.2.0 h1:dTy5bcT2WQbwPgytEZiG8SV1bCgHUXyDdaPDCNtRdEU= +github.com/caos/oidc v1.2.0/go.mod h1:4l0PPwdc6BbrdCFhNrRTUddsG292uHGa7gE2DSEIqoU= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod 
h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= +github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= +github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= +github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b h1:8xx0j7yceTAgVxonE+qOOepmwWS/Ic3OLQapY9HJajc= +github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane 
v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= +github.com/fatih/color v1.12.0 h1:mRhaKNwANqRgUBGKmnI5ZxEk7QXmjQeCcuYFMX2bfcc= +github.com/fatih/color v1.12.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= +github.com/felixge/httpsnoop v1.0.1/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/felixge/httpsnoop v1.0.2 h1:+nS9g82KMXccJ/wp0zyRW9ZBHFETmMGtkk+2CTTrW4o= +github.com/felixge/httpsnoop v1.0.2/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/fsnotify/fsnotify v1.4.3-0.20170329110642-4da3e2cfbabc/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/garyburd/redigo v1.1.1-0.20170914051019-70e1b1943d4f/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY= +github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 h1:Uc+IZ7gYqAf/rSGFplbWBSHaGolEQlNLgMgSE3ccnIQ= +github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813/go.mod h1:P+oSoE9yhSRvsmYyZsshflcR6ePWYLql6UU1amW13IM= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod 
h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= +github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q= +github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= +github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no= +github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= +github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE= +github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= +github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-stack/stack v1.6.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-test/deep v1.0.1 h1:UQhStjbkDClarlmv0am7OXXO4/GaPdCGiUiMTvi28sg= +github.com/go-test/deep v1.0.1/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= +github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= +github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod 
h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY= +github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg= +github.com/gobuffalo/envy v1.6.15/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= +github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= +github.com/gobuffalo/flect v0.1.0/go.mod h1:d2ehjJqGOH/Kjqcoz+F7jHTBbmDb38yXA598Hb50EGs= +github.com/gobuffalo/flect v0.1.1/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= +github.com/gobuffalo/flect v0.1.3/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= +github.com/gobuffalo/genny v0.0.0-20190329151137-27723ad26ef9/go.mod h1:rWs4Z12d1Zbf19rlsn0nurr75KqhYp52EAGGxTbBhNk= +github.com/gobuffalo/genny v0.0.0-20190403191548-3ca520ef0d9e/go.mod h1:80lIj3kVJWwOrXWWMRzzdhW3DsrdjILVil/SFKBzF28= +github.com/gobuffalo/genny v0.1.0/go.mod h1:XidbUqzak3lHdS//TPu2OgiFB+51Ur5f7CSnXZ/JDvo= +github.com/gobuffalo/genny v0.1.1/go.mod h1:5TExbEyY48pfunL4QSXxlDOmdsD44RRq4mVZ0Ex28Xk= +github.com/gobuffalo/gitgen v0.0.0-20190315122116-cc086187d211/go.mod h1:vEHJk/E9DmhejeLeNt7UVvlSGv3ziL+djtTr3yyzcOw= +github.com/gobuffalo/gogen v0.0.0-20190315121717-8f38393713f5/go.mod h1:V9QVDIxsgKNZs6L2IYiGR8datgMhB577vzTDqypH360= +github.com/gobuffalo/gogen v0.1.0/go.mod h1:8NTelM5qd8RZ15VjQTFkAW6qOMx5wBbW4dSCS3BY8gg= +github.com/gobuffalo/gogen v0.1.1/go.mod h1:y8iBtmHmGc4qa3urIyo1shvOD8JftTtfcKi+71xfDNE= +github.com/gobuffalo/logger v0.0.0-20190315122211-86e12af44bc2/go.mod h1:QdxcLw541hSGtBnhUc4gaNIXRjiDppFGaDqzbrBd3v8= +github.com/gobuffalo/mapi v1.0.1/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= +github.com/gobuffalo/mapi v1.0.2/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= +github.com/gobuffalo/packd v0.0.0-20190315124812-a385830c7fc0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= +github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= +github.com/gobuffalo/packr/v2 v2.0.9/go.mod 
h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= +github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= +github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= +github.com/goccy/go-yaml v1.9.5 h1:Eh/+3uk9kLxG4koCX6lRMAPS1OaMSAi+FJcya0INdB0= +github.com/goccy/go-yaml v1.9.5/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA= +github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= +github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +github.com/golang/gddo v0.0.0-20210115222349-20d68f94ee1f h1:16RtHeWGkJMc80Etb8RPCcKevXGldr57+LOyZt8zOlg= +github.com/golang/gddo v0.0.0-20210115222349-20d68f94ee1f/go.mod h1:ijRvpgDJDI262hYq/IQVYgf8hd8IHUs93Ol0kvMBAx4= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/lint v0.0.0-20170918230701-e5d664eb928e/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod 
h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= 
+github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.0-20170215233205-553a64147049/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.2/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.3 h1:fHPg5GQYlCeLIPB9BZqMVR5nR9A+IM5zcgeTdjMYmLA= +github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.1.1-0.20171103154506-982329095285/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod 
h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= +github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-github/v31 v31.0.0/go.mod h1:NQPZol8/1sMoWYGN2yaALIBytu17gAWfhbweiEed3pM= +github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= +github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.2.1 h1:d8MncMlErDFTwQGBK1xhv026j9kqhvw1Qv9IbWT1VLQ= +github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof 
v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20220412212628-83db2b799d1f h1:VrKTY4lquiy1oJzVZgXrauku9Jx9P+POv/gTLakG4Wk= +github.com/google/pprof v0.0.0-20220412212628-83db2b799d1f/go.mod h1:Pt31oes+eGImORns3McJn8zHefuQl2rG8l6xQjGYB4U= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= +github.com/googleapis/gax-go/v2 v2.1.1/go.mod 
h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= +github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= +github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM= +github.com/googleapis/gax-go/v2 v2.4.0 h1:dS9eYAjhrE2RjmzYw2XAPvcXfmcQLtFEQWn0CR82awk= +github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c= +github.com/googleapis/go-type-adapters v1.0.0 h1:9XdMn+d/G57qq1s8dNc5IesGCXHf6V2HZ2JwRxfA2tA= +github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= +github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= +github.com/googleinterns/cloud-operations-api-mock v0.0.0-20200709193332-a1e58c29bdd3 h1:eHv/jVY/JNop1xg2J9cBb4EzyMpWZoNCP1BslSAIkOI= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 h1:l5lAOZEym3oK3SQ2HBHWsJUfbNBiTXJDeW2QDxw9AQ0= +github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/handlers v1.5.1 h1:9lRY6j8DEeeBT10CvO9hGW0gmky0BprnvDI5vfhUHH4= +github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q= +github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= +github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= +github.com/gorilla/schema v1.2.0 h1:YufUaxZYCKGFuAq3c96BOhjgd5nmXiOY9NGzF247Tsc= +github.com/gorilla/schema v1.2.0/go.mod h1:kgLaKoK1FELgZqMAVxx/5cbj0kT+57qxUrAlIO2eleU= +github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ= +github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= +github.com/gorilla/websocket v1.5.0 
h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= +github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gregjones/httpcache v0.0.0-20170920190843-316c5e0ff04e/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw= +github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/hcl v0.0.0-20170914154624-68e816d1c783/go.mod h1:oZtUIOe8dh44I2q6ScRibXws4Ajl+d+nod3AaR9vL5w= +github.com/huandu/xstrings v1.3.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/iancoleman/strcase v0.2.0 h1:05I4QRnGpI0m37iZQRuskXh+w77mr6Z41lwQzuHLwW0= +github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20210905161508-09a460cdf81d/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w= +github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d h1:sQbbvtUoen3Tfl9G/079tXeqniwPH6TgM/lU4y7lQN8= +github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d/go.mod h1:xVHEhsiSJJnT0jlcQpQUg+GyoLf0i0xciM1kqWTGT58= +github.com/imdario/mergo 
v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= +github.com/inconshreveable/log15 v0.0.0-20170622235902-74a0988b5f80/go.mod h1:cOaXtrgN4ScfRrD9Bre7U1thNq5RtJ8ZoP4iXVGRj6o= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/jarcoal/httpmock v1.2.0 h1:gSvTxxFR/MEMfsGrvRbdfpRUMBStovlSRLw0Ep1bwwc= +github.com/jarcoal/httpmock v1.2.0/go.mod h1:oCoTsnAz4+UoOUIf5lJOWV2QQIW5UoeUI6aM2YnWAZk= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= +github.com/joho/godotenv v1.4.0 h1:3l4+N6zfMWnkbPEXKng2o2/MR5mSwTrBih4ZEkkz1lg= +github.com/joho/godotenv v1.4.0/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/jonas-p/go-shp v0.1.1 h1:LY81nN67DBCz6VNFn2kS64CjmnDo9IP8rmSkTvhO9jE= +github.com/jonas-p/go-shp v0.1.1/go.mod h1:MRIhyxDQ6VVp0oYeD7yPGr5RSTNScUFKCDsI5DR7PtI= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4= +github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA= +github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8= +github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg= +github.com/kennygrant/sanitize v1.2.4 h1:gN25/otpP5vAsO2djbMhF/LQX6R7+O1TB4yv8NzpJ3o= 
+github.com/kennygrant/sanitize v1.2.4/go.mod h1:LGsjYYtgxbetdg5owWB2mpgUL6e2nfw2eObZ0u0qvak= +github.com/kevinmbeaulieu/eq-go v1.0.0/go.mod h1:G3S8ajA56gKBZm4UB9AOyoOS37JO3roToPzKNM8dtdM= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= +github.com/klauspost/compress v1.11.1/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.13.6 h1:P76CopJELS0TiO2mebmnzgWaajssP/EszplttgQxcgc= +github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/labstack/echo/v4 v4.7.2 h1:Kv2/p8OaQ+M6Ex4eGimg9b9e6icoxA42JSlOR3msKtI= +github.com/labstack/echo/v4 v4.7.2/go.mod h1:xkCDAdFCIf8jsFQ5NnbK7oqaF/yU1A1X20Ltm0OvSks= +github.com/labstack/gommon v0.3.1 h1:OomWaJXm7xR6L1HmEtGyQf26TEn7V6X88mktX9kee9o= +github.com/labstack/gommon v0.3.1/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM= +github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= +github.com/logrusorgru/aurora/v3 
v3.0.0/go.mod h1:vsR12bk5grlLvLXAYrBsb5Oc/N+LxAlxggSjiwMnCUc= +github.com/magiconair/properties v1.7.4-0.20170902060319-8d7837e64d3c/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= +github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= +github.com/matryer/moq v0.2.7 h1:RtpiPUM8L7ZSCbSwK+QcZH/E9tgqAkFjKQxsRs25b4w= +github.com/matryer/moq v0.2.7/go.mod h1:kITsx543GOENm48TUAQyJ9+SAvFSr7iGQXPoth/VUBk= +github.com/mattn/go-colorable v0.0.10-0.20170816031813-ad5389df28cd/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-isatty v0.0.2/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/maxatome/go-testdeep v1.11.0 h1:Tgh5efyCYyJFGUYiT0qxBSIDeXw0F5zSoatlou685kk= +github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= +github.com/mitchellh/mapstructure v0.0.0-20170523030023-d0303fe80992/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.3.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod 
h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= +github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4= +github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32/go.mod h1:9wM+0iRr9ahx58uYLpLIr5fm8diHn0JbqRycJi6w0Ms= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= +github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= +github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= +github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= +github.com/paulmach/go.geojson v1.4.0 h1:5x5moCkCtDo5x8af62P9IOAYGQcYHtxz2QJ3x1DoCgY= +github.com/paulmach/go.geojson v1.4.0/go.mod h1:YaKx1hKpWF+T2oj2lFJPsW/t1Q5e1jQI61eoQSTwpIs= +github.com/pelletier/go-toml v1.0.1-0.20170904195809-1d6b12b7cb29/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= +github.com/pierrre/gotestcover v0.0.0-20160517101806-924dca7d15f0/go.mod h1:4xpMLz7RBWyB+ElzHu8Llua96TRCB3YwX+l5EP1wmHk= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= 
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/ravilushqa/otelgqlgen v0.6.1 h1:KoRURWiQfthje/G6hG6zDF9QjoEFrmHgb3mAe5kZZ7k= +github.com/ravilushqa/otelgqlgen v0.6.1/go.mod h1:2SUPOCCsJdvbyfLtZI81C/1Q76jSFNFs/2NrtyTI2AQ= +github.com/robertkrimen/godocdown v0.0.0-20130622164427-0bfa04905481/go.mod h1:C9WhFzY47SzYBIvzFqSvHIR6ROgDo4TtdTuRaOMjF/s= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/samber/lo v1.21.0 h1:FSby8pJQtX4KmyddTCCGhc3JvnnIVrDA+NW37rG+7G8= +github.com/samber/lo v1.21.0/go.mod h1:2I7tgIv8Q1SG2xEIkRq0F2i2zgxVpnyPOP0d3Gj2r+A= +github.com/sendgrid/rest v2.6.6+incompatible h1:3rO5UTPhLQo6fjytWwdwRWclP101CqErg2klf8LneB4= +github.com/sendgrid/rest v2.6.6+incompatible/go.mod h1:kXX7q3jZtJXK5c5qK83bSGMdV6tsOE70KbHoqJls4lE= +github.com/sendgrid/sendgrid-go v3.11.1+incompatible h1:ai0+woZ3r/+tKLQExznak5XerOFoD6S7ePO0lMV8WXo= +github.com/sendgrid/sendgrid-go v3.11.1+incompatible/go.mod h1:QRQt+LX/NmgVEvmdRw0VT/QgUn499+iza2FnDca9fg8= +github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= +github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/sirupsen/logrus v1.4.0/go.mod 
h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= +github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/assertions v1.1.1 h1:T/YLemO5Yp7KPzS+lVtu+WsHn8yoSwTfItdAd1r3cck= +github.com/smartystreets/assertions v1.1.1/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= +github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v0.0.0-20170901052352-ee1bd8ee15a1/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= +github.com/spf13/afero v1.8.2 h1:xehSyVa0YnHWsJ49JFljMpg1HX19V6NDZ1fkm1Xznbo= +github.com/spf13/afero v1.8.2/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo= +github.com/spf13/cast v1.1.0/go.mod h1:r2rcYCSwa1IExKTDiTfzaxqT2FNHs8hODu4LnUfgKEg= +github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/jwalterweatherman v0.0.0-20170901151539-12bd96e66386/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/pflag v1.0.1-0.20170901120850-7aff26db30c1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/viper v1.0.0/go.mod h1:A8kyI5cUJhb8N+3pkfONlcEcZbueH6nhAm0Fq7SrnBM= +github.com/square/mongo-lock v0.0.0-20201208161834-4db518ed7fb2 
h1:Fod/tm/5c19889+T6j7mXxg/tEJrcLuDJxR/98raj80= +github.com/square/mongo-lock v0.0.0-20201208161834-4db518ed7fb2/go.mod h1:h98Zzl76KWv7bG0FHBMA9MAcDhwcIyE7q570tDP7CmY= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/thoas/go-funk v0.9.1 h1:O549iLZqPpTUQ10ykd26sZhzD+rmR5pWhuElrhbC20M= +github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tidwall/pretty v1.0.1 h1:WE4RBSZ1x6McVVC8S/Md+Qse8YUv6HRObAx6ke00NY8= +github.com/tidwall/pretty v1.0.1/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/twpayne/go-kml v1.5.2 h1:rFMw2/EwgkVssGS2MT6YfWSPZz6BgcJkLxQ53jnE8rQ= +github.com/twpayne/go-kml v1.5.2/go.mod h1:kz8jAiIz6FIdU2Zjce9qGlVtgFYES9vt7BTPBHf5jl4= +github.com/twpayne/go-polyline v1.0.0/go.mod h1:ICh24bcLYBX8CknfvNPKqoTbe+eg+MX1NPyJmSBo7pU= +github.com/twpayne/go-waypoint 
v0.0.0-20200706203930-b263a7f6e4e8/go.mod h1:qj5pHncxKhu9gxtZEYWypA/z097sxhFlbTyOyt9gcnU= +github.com/uber/jaeger-client-go v2.30.0+incompatible h1:D6wyKGCecFaSRUpo8lCVbaOOb6ThwMmTEbhRwtKR97o= +github.com/uber/jaeger-client-go v2.30.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= +github.com/uber/jaeger-lib v2.4.1+incompatible h1:td4jdvLcExb4cBISKIpHuGoVXh+dVKhn2Um6rjCsSsg= +github.com/uber/jaeger-lib v2.4.1+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U= +github.com/urfave/cli/v2 v2.4.0 h1:m2pxjjDFgDxSPtO8WSdbndj17Wu2y8vOT86wE/tjr+I= +github.com/urfave/cli/v2 v2.4.0/go.mod h1:NX9W0zmTvedE5oDoOMs2RTC8RvdK98NTYZE5LbaEYPg= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasttemplate v1.2.1 h1:TVEnxayobAdVkhQfrfes2IzOB6o+z4roRkPF52WA1u4= +github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= +github.com/vektah/dataloaden v0.3.0 h1:ZfVN2QD6swgvp+tDqdH/OIT/wu3Dhu0cus0k5gIZS84= +github.com/vektah/dataloaden v0.3.0/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= +github.com/vektah/gqlparser/v2 v2.4.2 h1:29TGc6QmhEUq5fll+2FPoTmhUhR65WEKN4VK/jo0OlM= +github.com/vektah/gqlparser/v2 v2.4.2/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0= +github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.0.2 h1:akYIkZ28e6A96dkWNJQu3nmCzH3YfwMPQExUYDaRv7w= +github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= +github.com/xdg-go/stringprep v1.0.2 h1:6iq84/ryjjeRmMJwxutI51F2GIPlP5BfTvXHeYjyhBc= +github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= +github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod 
h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= +github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= +github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= +github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA= +github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +go.mongodb.org/mongo-driver v1.4.2/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= +go.mongodb.org/mongo-driver v1.9.0/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= +go.mongodb.org/mongo-driver v1.9.1 h1:m078y9v7sBItkt1aaoe2YlvWEXcD263e1a4E1fBrJ1c= +go.mongodb.org/mongo-driver v1.9.1/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.23.0 
h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.opentelemetry.io/contrib v1.6.0 h1:xJawAzMuR3s4Au5p/ABHqYFychHjK2AHB9JvkBuBbTA= +go.opentelemetry.io/contrib v1.6.0/go.mod h1:FlyPNX9s4U6MCsWEc5YAK4KzKNHFDsjrDUZijJiXvy8= +go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.32.0 h1:bkyJgifVcPo1w8HYf1K0ExtgdmNgxyVa02o/yFDrSAA= +go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.32.0/go.mod h1:rmdIBqEgyXERsERn9CjVXXPL9qAinIsID+X9AhBnzOQ= +go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.32.0 h1:gNKQHn+q326vsi+kOskx9FCz9Jkz2fvxlf1y46dTN14= +go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.32.0/go.mod h1:9WqBmOJ4AOChNHtnRBSCGlKN4PQf1coLTCK57fyXE/s= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.31.0 h1:woM+Mb4d0A+Dxa3rYPenSN5ZeS9qHUvE8rlObiLRXTY= +go.opentelemetry.io/contrib/propagators/b3 v1.7.0 h1:oRAenUhj+GFttfIp3gj7HYVzBhPOHgq/dWPDSmLCXSY= +go.opentelemetry.io/contrib/propagators/b3 v1.7.0/go.mod h1:gXx7AhL4xXCF42gpm9dQvdohoDa2qeyEx4eIIxqK+h4= +go.opentelemetry.io/otel v1.7.0 h1:Z2lA3Tdch0iDcrhJXDIlC94XE+bxok1F9B+4Lz/lGsM= +go.opentelemetry.io/otel v1.7.0/go.mod h1:5BdUoMIz5WEs0vt0CUEMtSSaTSHBBVwrhnz7+nrD5xk= +go.opentelemetry.io/otel/metric v0.28.0 h1:o5YNh+jxACMODoAo1bI7OES0RUW4jAMae0Vgs2etWAQ= +go.opentelemetry.io/otel/sdk v1.7.0 h1:4OmStpcKVOfvDOgCt7UriAPtKolwIhxpnSNI/yK+1B0= +go.opentelemetry.io/otel/sdk v1.7.0/go.mod h1:uTEOTwaqIVuTGiJN7ii13Ibp75wJmYUDe374q6cZwUU= +go.opentelemetry.io/otel/trace v1.7.0 h1:O37Iogk1lEkMRXewVtZ1BBTVn5JEp8GrJvP92bJqC6o= +go.opentelemetry.io/otel/trace v1.7.0/go.mod h1:fzLSB9nqR2eXzxPXb2JW9IKE+ScyXA48yyE4TNvoHqU= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= 
+go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20220518034528-6f7dac969898 h1:SLP7Q4Di66FONjDJbCYrCRrh97focO6sLogHO7/g8F0= +golang.org/x/crypto v0.0.0-20220518034528-6f7dac969898/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/exp 
v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20220518171630-0b5c67f07fdf h1:oXVg4h2qJDd9htKxb5SCpFBHLipW6hXmL3qpUixS2jw= +golang.org/x/exp v0.0.0-20220518171630-0b5c67f07fdf/go.mod h1:yh0Ynu2b5ZUe3MQfp2nM0ecK7wsgouWTDN0FNeJuIys= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod 
h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= +golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 
h1:kQgndtyPBW/JIYERgdxfwMYh3AVStj88WQTlNDi2a+o= +golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= 
+golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net 
v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4 h1:HVyaeDAYux4pnY+D/SiwmLOR36ewZ4iGQIIrtnuCjFA= +golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/oauth2 v0.0.0-20170912212905-13449ad91cb2/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 
v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 h1:OSnWWcOd/CtWQC2cYSBgbTSJv3ciqd8r54ySIW2y3RE= +golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/sync v0.0.0-20170517211232-f52d1811a629/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync 
v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201008141435-b3e1573b7520/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190419153524-e8e3143a4f4a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191206220618-eeba5f6aabab/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211103235746-7861aae1554b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211107104306-e0b2ad06fe42/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6 h1:nonptSpoQ4vQjyraW20DXPAglgQfVnM9ZC6MmNLMR60= +golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod 
h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/time v0.0.0-20170424234030-8be79e1e0910/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba h1:O8mE0/t419eoIwhTFpKVkHiTs/Igowgfkj25AcZrtiE= +golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools 
v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190515012406-7d7faa4812bd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools 
v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools 
v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= +golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= +golang.org/x/tools v0.1.10 h1:QjFRCZxdOhBJ/UNgnBZLbNV13DlbnK0quyivTnXJM20= +golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f h1:GGU+dLjvlC3qDwqYgL6UgRmHXhOOgns0bZu2Ty5mm6U= +golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.0.0-20170921000349-586095a6e407/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod 
h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= +google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= +google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= +google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= +google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= +google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= +google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= +google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= +google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g= +google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA= +google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8= +google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= +google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= +google.golang.org/api v0.78.0/go.mod h1:1Sg78yoMLOhlQTeF+ARBoytAcH1NNyyl390YMy6rKmw= +google.golang.org/api v0.80.0 h1:IQWaGVCYnsm4MO3hh+WtSXMzMzuyFx/fuR8qkN3A0Qo= +google.golang.org/api v0.80.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod 
h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20170918111702-1e559d0a00ee/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod 
h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto 
v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210329143202-679c6ae281ee/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= 
+google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= +google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= +google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod 
h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= +google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335 h1:2D0OT6tPVdrQTOnVe1VQjfJPTED6EZ7fdJ/f6Db6OsY= +google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/grpc v1.2.1-0.20170921194603-d4b75ebd4f9f/go.mod 
h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc 
v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= +google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= +google.golang.org/grpc v1.46.0 h1:oCjezcn6g6A75TGoKYBPgKmVBLexhYLM6MebdrPApP8= +google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod 
h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b h1:QRR6H1YWRnHb4Y/HeNFCTJLFVxaq6wH4YuVdsUOr75U= +gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/go-playground/colors.v1 v1.2.0 h1:SPweMUve+ywPrfwao+UvfD5Ah78aOLUkT5RlJiZn52c= +gopkg.in/go-playground/colors.v1 v1.2.0/go.mod h1:AvbqcMpNXVl5gBrM20jBm3VjjKBbH/kI5UnqjU7lxFI= +gopkg.in/h2non/gock.v1 v1.1.2 h1:jBbHXgGBK/AoPVfJh5x4r/WxIrElvbLel8TCZkkZJoY= +gopkg.in/h2non/gock.v1 v1.1.2/go.mod h1:n7UGz/ckNChHiK05rDoiC4MYSunEC/lyaUm2WWaDva0= +gopkg.in/square/go-jose.v2 v2.6.0 h1:NGk74WTnPKBNUhNzQX7PYcTLUjoq7mzKk2OKbvwk2iI= +gopkg.in/square/go-jose.v2 v2.6.0/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= 
+gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/server/gqlgen.yml b/server/gqlgen.yml new file mode 100644 index 000000000..a0cb9a345 --- /dev/null +++ b/server/gqlgen.yml @@ -0,0 +1,33 @@ +# .gqlgen.yml example +# +# Refer to https://gqlgen.com/config/ +# for detailed .gqlgen.yml documentation. 
+ +schema: + - schema.graphql +exec: + filename: internal/adapter/gql/generated.go +model: + filename: internal/adapter/gql/gqlmodel/models_gen.go +resolver: + filename: internal/adapter/gql/resolver.go + type: Resolver +models: + DateTime: + model: github.com/99designs/gqlgen/graphql.Time + FileSize: + model: github.com/99designs/gqlgen/graphql.Int64 + Cursor: + model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Cursor + URL: + model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.URL + TranslatedString: + model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Map + Lang: + model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Lang + ID: + model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID + DatasetSchema: + fields: + totalCount: + resolver: true diff --git a/server/internal/adapter/context.go b/server/internal/adapter/context.go new file mode 100644 index 000000000..4025c2de4 --- /dev/null +++ b/server/internal/adapter/context.go @@ -0,0 +1,96 @@ +package adapter + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/user" + "golang.org/x/text/language" +) + +type ContextKey string + +const ( + contextUser ContextKey = "user" + contextOperator ContextKey = "operator" + contextAuthInfo ContextKey = "authinfo" + contextUsecases ContextKey = "usecases" +) + +var defaultLang = language.English + +type AuthInfo struct { + Token string + Sub string + Iss string + Name string + Email string + EmailVerified *bool +} + +func AttachUser(ctx context.Context, u *user.User) context.Context { + return context.WithValue(ctx, contextUser, u) +} + +func AttachOperator(ctx context.Context, o *usecase.Operator) context.Context { + return context.WithValue(ctx, contextOperator, o) +} + +func AttachAuthInfo(ctx context.Context, a AuthInfo) 
context.Context { + return context.WithValue(ctx, contextAuthInfo, a) +} + +func AttachUsecases(ctx context.Context, u *interfaces.Container) context.Context { + ctx = context.WithValue(ctx, contextUsecases, u) + return ctx +} + +func User(ctx context.Context) *user.User { + if v := ctx.Value(contextUser); v != nil { + if u, ok := v.(*user.User); ok { + return u + } + } + return nil +} + +func Lang(ctx context.Context, lang *language.Tag) string { + if lang != nil && !lang.IsRoot() { + return lang.String() + } + + u := User(ctx) + if u == nil { + return defaultLang.String() + } + + l := u.Lang() + if l.IsRoot() { + return defaultLang.String() + } + + return l.String() +} + +func Operator(ctx context.Context) *usecase.Operator { + if v := ctx.Value(contextOperator); v != nil { + if v2, ok := v.(*usecase.Operator); ok { + return v2 + } + } + return nil +} + +func GetAuthInfo(ctx context.Context) *AuthInfo { + if v := ctx.Value(contextAuthInfo); v != nil { + if v2, ok := v.(AuthInfo); ok { + return &v2 + } + } + return nil +} + +func Usecases(ctx context.Context) *interfaces.Container { + return ctx.Value(contextUsecases).(*interfaces.Container) +} diff --git a/server/internal/adapter/gql/context.go b/server/internal/adapter/gql/context.go new file mode 100644 index 000000000..b6ab687ae --- /dev/null +++ b/server/internal/adapter/gql/context.go @@ -0,0 +1,53 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/user" + "golang.org/x/text/language" +) + +type ContextKey string + +const ( + contextLoaders ContextKey = "loaders" + contextDataloaders ContextKey = "dataloaders" +) + +func AttachUsecases(ctx context.Context, u *interfaces.Container, enableDataLoaders bool) context.Context { + loaders := NewLoaders(u) + dataloaders := loaders.DataLoadersWith(ctx, 
enableDataLoaders) + + ctx = adapter.AttachUsecases(ctx, u) + ctx = context.WithValue(ctx, contextLoaders, loaders) + ctx = context.WithValue(ctx, contextDataloaders, dataloaders) + + return ctx +} + +func getUser(ctx context.Context) *user.User { + return adapter.User(ctx) +} + +func getLang(ctx context.Context, lang *language.Tag) string { + return adapter.Lang(ctx, lang) +} + +func getOperator(ctx context.Context) *usecase.Operator { + return adapter.Operator(ctx) +} + +func usecases(ctx context.Context) *interfaces.Container { + return adapter.Usecases(ctx) +} + +func loaders(ctx context.Context) *Loaders { + return ctx.Value(contextLoaders).(*Loaders) +} + +func dataloaders(ctx context.Context) *DataLoaders { + return ctx.Value(contextDataloaders).(*DataLoaders) +} diff --git a/server/internal/adapter/gql/generated.go b/server/internal/adapter/gql/generated.go new file mode 100644 index 000000000..33c28edd2 --- /dev/null +++ b/server/internal/adapter/gql/generated.go @@ -0,0 +1,59716 @@ +// Code generated by github.com/99designs/gqlgen, DO NOT EDIT. + +package gql + +import ( + "bytes" + "context" + "errors" + "fmt" + "net/url" + "strconv" + "sync" + "sync/atomic" + "time" + + "github.com/99designs/gqlgen/graphql" + "github.com/99designs/gqlgen/graphql/introspection" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase" + gqlparser "github.com/vektah/gqlparser/v2" + "github.com/vektah/gqlparser/v2/ast" + "golang.org/x/text/language" +) + +// region ************************** generated!.gotpl ************************** + +// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface. 
+func NewExecutableSchema(cfg Config) graphql.ExecutableSchema { + return &executableSchema{ + resolvers: cfg.Resolvers, + directives: cfg.Directives, + complexity: cfg.Complexity, + } +} + +type Config struct { + Resolvers ResolverRoot + Directives DirectiveRoot + Complexity ComplexityRoot +} + +type ResolverRoot interface { + Asset() AssetResolver + Cluster() ClusterResolver + Dataset() DatasetResolver + DatasetField() DatasetFieldResolver + DatasetSchema() DatasetSchemaResolver + DatasetSchemaField() DatasetSchemaFieldResolver + Infobox() InfoboxResolver + InfoboxField() InfoboxFieldResolver + LayerGroup() LayerGroupResolver + LayerItem() LayerItemResolver + LayerTagGroup() LayerTagGroupResolver + LayerTagItem() LayerTagItemResolver + Me() MeResolver + MergedInfobox() MergedInfoboxResolver + MergedInfoboxField() MergedInfoboxFieldResolver + MergedLayer() MergedLayerResolver + MergedProperty() MergedPropertyResolver + MergedPropertyField() MergedPropertyFieldResolver + MergedPropertyGroup() MergedPropertyGroupResolver + Mutation() MutationResolver + Plugin() PluginResolver + PluginExtension() PluginExtensionResolver + Project() ProjectResolver + Property() PropertyResolver + PropertyField() PropertyFieldResolver + PropertyFieldLink() PropertyFieldLinkResolver + PropertyGroup() PropertyGroupResolver + PropertyGroupList() PropertyGroupListResolver + PropertyLinkableFields() PropertyLinkableFieldsResolver + PropertySchemaField() PropertySchemaFieldResolver + PropertySchemaFieldChoice() PropertySchemaFieldChoiceResolver + PropertySchemaGroup() PropertySchemaGroupResolver + Query() QueryResolver + Scene() SceneResolver + ScenePlugin() ScenePluginResolver + SceneWidget() SceneWidgetResolver + TagGroup() TagGroupResolver + TagItem() TagItemResolver + Team() TeamResolver + TeamMember() TeamMemberResolver +} + +type DirectiveRoot struct { +} + +type ComplexityRoot struct { + AddClusterPayload struct { + Cluster func(childComplexity int) int + Scene func(childComplexity 
int) int + } + + AddDatasetSchemaPayload struct { + DatasetSchema func(childComplexity int) int + } + + AddDynamicDatasetPayload struct { + Dataset func(childComplexity int) int + DatasetSchema func(childComplexity int) int + } + + AddDynamicDatasetSchemaPayload struct { + DatasetSchema func(childComplexity int) int + } + + AddInfoboxFieldPayload struct { + InfoboxField func(childComplexity int) int + Layer func(childComplexity int) int + } + + AddLayerGroupPayload struct { + Index func(childComplexity int) int + Layer func(childComplexity int) int + ParentLayer func(childComplexity int) int + } + + AddLayerItemPayload struct { + Index func(childComplexity int) int + Layer func(childComplexity int) int + ParentLayer func(childComplexity int) int + } + + AddMemberToTeamPayload struct { + Team func(childComplexity int) int + } + + AddWidgetPayload struct { + Scene func(childComplexity int) int + SceneWidget func(childComplexity int) int + } + + Asset struct { + ContentType func(childComplexity int) int + CreatedAt func(childComplexity int) int + ID func(childComplexity int) int + Name func(childComplexity int) int + Size func(childComplexity int) int + Team func(childComplexity int) int + TeamID func(childComplexity int) int + URL func(childComplexity int) int + } + + AssetConnection struct { + Edges func(childComplexity int) int + Nodes func(childComplexity int) int + PageInfo func(childComplexity int) int + TotalCount func(childComplexity int) int + } + + AssetEdge struct { + Cursor func(childComplexity int) int + Node func(childComplexity int) int + } + + AttachTagItemToGroupPayload struct { + Tag func(childComplexity int) int + } + + AttachTagToLayerPayload struct { + Layer func(childComplexity int) int + } + + Camera struct { + Altitude func(childComplexity int) int + Fov func(childComplexity int) int + Heading func(childComplexity int) int + Lat func(childComplexity int) int + Lng func(childComplexity int) int + Pitch func(childComplexity int) int + Roll 
func(childComplexity int) int + } + + Cluster struct { + ID func(childComplexity int) int + Name func(childComplexity int) int + Property func(childComplexity int) int + PropertyID func(childComplexity int) int + } + + CreateAssetPayload struct { + Asset func(childComplexity int) int + } + + CreateInfoboxPayload struct { + Layer func(childComplexity int) int + } + + CreateScenePayload struct { + Scene func(childComplexity int) int + } + + CreateTagGroupPayload struct { + Tag func(childComplexity int) int + } + + CreateTagItemPayload struct { + Parent func(childComplexity int) int + Tag func(childComplexity int) int + } + + CreateTeamPayload struct { + Team func(childComplexity int) int + } + + Dataset struct { + Fields func(childComplexity int) int + ID func(childComplexity int) int + Name func(childComplexity int) int + Schema func(childComplexity int) int + SchemaID func(childComplexity int) int + Source func(childComplexity int) int + } + + DatasetConnection struct { + Edges func(childComplexity int) int + Nodes func(childComplexity int) int + PageInfo func(childComplexity int) int + TotalCount func(childComplexity int) int + } + + DatasetEdge struct { + Cursor func(childComplexity int) int + Node func(childComplexity int) int + } + + DatasetField struct { + Field func(childComplexity int) int + FieldID func(childComplexity int) int + Schema func(childComplexity int) int + SchemaID func(childComplexity int) int + Source func(childComplexity int) int + Type func(childComplexity int) int + Value func(childComplexity int) int + ValueRef func(childComplexity int) int + } + + DatasetSchema struct { + Datasets func(childComplexity int, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + Dynamic func(childComplexity int) int + Fields func(childComplexity int) int + ID func(childComplexity int) int + Name func(childComplexity int) int + RepresentativeField func(childComplexity int) int + RepresentativeFieldID func(childComplexity int) int + Scene 
func(childComplexity int) int + SceneID func(childComplexity int) int + Source func(childComplexity int) int + TotalCount func(childComplexity int) int + } + + DatasetSchemaConnection struct { + Edges func(childComplexity int) int + Nodes func(childComplexity int) int + PageInfo func(childComplexity int) int + TotalCount func(childComplexity int) int + } + + DatasetSchemaEdge struct { + Cursor func(childComplexity int) int + Node func(childComplexity int) int + } + + DatasetSchemaField struct { + ID func(childComplexity int) int + Name func(childComplexity int) int + Ref func(childComplexity int) int + RefID func(childComplexity int) int + Schema func(childComplexity int) int + SchemaID func(childComplexity int) int + Source func(childComplexity int) int + Type func(childComplexity int) int + } + + DeleteMePayload struct { + UserID func(childComplexity int) int + } + + DeleteProjectPayload struct { + ProjectID func(childComplexity int) int + } + + DeleteTeamPayload struct { + TeamID func(childComplexity int) int + } + + DetachTagFromLayerPayload struct { + Layer func(childComplexity int) int + } + + DetachTagItemFromGroupPayload struct { + Tag func(childComplexity int) int + } + + ImportDatasetPayload struct { + DatasetSchema func(childComplexity int) int + } + + ImportLayerPayload struct { + Layers func(childComplexity int) int + ParentLayer func(childComplexity int) int + } + + Infobox struct { + Fields func(childComplexity int) int + Layer func(childComplexity int) int + LayerID func(childComplexity int) int + LinkedDataset func(childComplexity int) int + LinkedDatasetID func(childComplexity int) int + Merged func(childComplexity int) int + Property func(childComplexity int) int + PropertyID func(childComplexity int) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int + } + + InfoboxField struct { + Extension func(childComplexity int) int + ExtensionID func(childComplexity int) int + ID func(childComplexity int) int + Infobox 
func(childComplexity int) int + Layer func(childComplexity int) int + LayerID func(childComplexity int) int + LinkedDataset func(childComplexity int) int + LinkedDatasetID func(childComplexity int) int + Merged func(childComplexity int) int + Plugin func(childComplexity int) int + PluginID func(childComplexity int) int + Property func(childComplexity int) int + PropertyID func(childComplexity int) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int + ScenePlugin func(childComplexity int) int + } + + InstallPluginPayload struct { + Scene func(childComplexity int) int + ScenePlugin func(childComplexity int) int + } + + LatLng struct { + Lat func(childComplexity int) int + Lng func(childComplexity int) int + } + + LatLngHeight struct { + Height func(childComplexity int) int + Lat func(childComplexity int) int + Lng func(childComplexity int) int + } + + LayerGroup struct { + Extension func(childComplexity int) int + ExtensionID func(childComplexity int) int + ID func(childComplexity int) int + Infobox func(childComplexity int) int + IsVisible func(childComplexity int) int + LayerIds func(childComplexity int) int + Layers func(childComplexity int) int + LinkedDatasetSchema func(childComplexity int) int + LinkedDatasetSchemaID func(childComplexity int) int + Name func(childComplexity int) int + Parent func(childComplexity int) int + ParentID func(childComplexity int) int + Plugin func(childComplexity int) int + PluginID func(childComplexity int) int + Property func(childComplexity int) int + PropertyID func(childComplexity int) int + Root func(childComplexity int) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int + ScenePlugin func(childComplexity int) int + Tags func(childComplexity int) int + } + + LayerItem struct { + Extension func(childComplexity int) int + ExtensionID func(childComplexity int) int + ID func(childComplexity int) int + Infobox func(childComplexity int) int + IsVisible func(childComplexity 
int) int + LinkedDataset func(childComplexity int) int + LinkedDatasetID func(childComplexity int) int + Merged func(childComplexity int) int + Name func(childComplexity int) int + Parent func(childComplexity int) int + ParentID func(childComplexity int) int + Plugin func(childComplexity int) int + PluginID func(childComplexity int) int + Property func(childComplexity int) int + PropertyID func(childComplexity int) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int + ScenePlugin func(childComplexity int) int + Tags func(childComplexity int) int + } + + LayerTagGroup struct { + Children func(childComplexity int) int + Tag func(childComplexity int) int + TagID func(childComplexity int) int + } + + LayerTagItem struct { + Tag func(childComplexity int) int + TagID func(childComplexity int) int + } + + Me struct { + Auths func(childComplexity int) int + Email func(childComplexity int) int + ID func(childComplexity int) int + Lang func(childComplexity int) int + MyTeam func(childComplexity int) int + MyTeamID func(childComplexity int) int + Name func(childComplexity int) int + Teams func(childComplexity int) int + Theme func(childComplexity int) int + } + + MergedInfobox struct { + Fields func(childComplexity int) int + Property func(childComplexity int) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int + } + + MergedInfoboxField struct { + Extension func(childComplexity int) int + ExtensionID func(childComplexity int) int + OriginalID func(childComplexity int) int + Plugin func(childComplexity int) int + PluginID func(childComplexity int) int + Property func(childComplexity int) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int + ScenePlugin func(childComplexity int) int + } + + MergedLayer struct { + Infobox func(childComplexity int) int + Original func(childComplexity int) int + OriginalID func(childComplexity int) int + Parent func(childComplexity int) int + ParentID 
func(childComplexity int) int + Property func(childComplexity int) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int + } + + MergedProperty struct { + Groups func(childComplexity int) int + LinkedDataset func(childComplexity int) int + LinkedDatasetID func(childComplexity int) int + Original func(childComplexity int) int + OriginalID func(childComplexity int) int + Parent func(childComplexity int) int + ParentID func(childComplexity int) int + Schema func(childComplexity int) int + SchemaID func(childComplexity int) int + } + + MergedPropertyField struct { + ActualValue func(childComplexity int) int + Field func(childComplexity int) int + FieldID func(childComplexity int) int + Links func(childComplexity int) int + Overridden func(childComplexity int) int + Schema func(childComplexity int) int + SchemaID func(childComplexity int) int + Type func(childComplexity int) int + Value func(childComplexity int) int + } + + MergedPropertyGroup struct { + Fields func(childComplexity int) int + Groups func(childComplexity int) int + LinkedDataset func(childComplexity int) int + LinkedDatasetID func(childComplexity int) int + Original func(childComplexity int) int + OriginalID func(childComplexity int) int + OriginalProperty func(childComplexity int) int + OriginalPropertyID func(childComplexity int) int + Parent func(childComplexity int) int + ParentID func(childComplexity int) int + ParentProperty func(childComplexity int) int + ParentPropertyID func(childComplexity int) int + Schema func(childComplexity int) int + SchemaGroupID func(childComplexity int) int + SchemaID func(childComplexity int) int + } + + MoveInfoboxFieldPayload struct { + Index func(childComplexity int) int + InfoboxFieldID func(childComplexity int) int + Layer func(childComplexity int) int + } + + MoveLayerPayload struct { + FromParentLayer func(childComplexity int) int + Index func(childComplexity int) int + LayerID func(childComplexity int) int + ToParentLayer 
func(childComplexity int) int + } + + Mutation struct { + AddCluster func(childComplexity int, input gqlmodel.AddClusterInput) int + AddDatasetSchema func(childComplexity int, input gqlmodel.AddDatasetSchemaInput) int + AddDynamicDataset func(childComplexity int, input gqlmodel.AddDynamicDatasetInput) int + AddDynamicDatasetSchema func(childComplexity int, input gqlmodel.AddDynamicDatasetSchemaInput) int + AddInfoboxField func(childComplexity int, input gqlmodel.AddInfoboxFieldInput) int + AddLayerGroup func(childComplexity int, input gqlmodel.AddLayerGroupInput) int + AddLayerItem func(childComplexity int, input gqlmodel.AddLayerItemInput) int + AddMemberToTeam func(childComplexity int, input gqlmodel.AddMemberToTeamInput) int + AddPropertyItem func(childComplexity int, input gqlmodel.AddPropertyItemInput) int + AddWidget func(childComplexity int, input gqlmodel.AddWidgetInput) int + AttachTagItemToGroup func(childComplexity int, input gqlmodel.AttachTagItemToGroupInput) int + AttachTagToLayer func(childComplexity int, input gqlmodel.AttachTagToLayerInput) int + CreateAsset func(childComplexity int, input gqlmodel.CreateAssetInput) int + CreateInfobox func(childComplexity int, input gqlmodel.CreateInfoboxInput) int + CreateProject func(childComplexity int, input gqlmodel.CreateProjectInput) int + CreateScene func(childComplexity int, input gqlmodel.CreateSceneInput) int + CreateTagGroup func(childComplexity int, input gqlmodel.CreateTagGroupInput) int + CreateTagItem func(childComplexity int, input gqlmodel.CreateTagItemInput) int + CreateTeam func(childComplexity int, input gqlmodel.CreateTeamInput) int + DeleteMe func(childComplexity int, input gqlmodel.DeleteMeInput) int + DeleteProject func(childComplexity int, input gqlmodel.DeleteProjectInput) int + DeleteTeam func(childComplexity int, input gqlmodel.DeleteTeamInput) int + DetachTagFromLayer func(childComplexity int, input gqlmodel.DetachTagFromLayerInput) int + DetachTagItemFromGroup func(childComplexity 
int, input gqlmodel.DetachTagItemFromGroupInput) int + ImportDataset func(childComplexity int, input gqlmodel.ImportDatasetInput) int + ImportDatasetFromGoogleSheet func(childComplexity int, input gqlmodel.ImportDatasetFromGoogleSheetInput) int + ImportLayer func(childComplexity int, input gqlmodel.ImportLayerInput) int + InstallPlugin func(childComplexity int, input gqlmodel.InstallPluginInput) int + LinkDatasetToPropertyValue func(childComplexity int, input gqlmodel.LinkDatasetToPropertyValueInput) int + MoveInfoboxField func(childComplexity int, input gqlmodel.MoveInfoboxFieldInput) int + MoveLayer func(childComplexity int, input gqlmodel.MoveLayerInput) int + MovePropertyItem func(childComplexity int, input gqlmodel.MovePropertyItemInput) int + PublishProject func(childComplexity int, input gqlmodel.PublishProjectInput) int + RemoveAsset func(childComplexity int, input gqlmodel.RemoveAssetInput) int + RemoveCluster func(childComplexity int, input gqlmodel.RemoveClusterInput) int + RemoveDatasetSchema func(childComplexity int, input gqlmodel.RemoveDatasetSchemaInput) int + RemoveInfobox func(childComplexity int, input gqlmodel.RemoveInfoboxInput) int + RemoveInfoboxField func(childComplexity int, input gqlmodel.RemoveInfoboxFieldInput) int + RemoveLayer func(childComplexity int, input gqlmodel.RemoveLayerInput) int + RemoveMemberFromTeam func(childComplexity int, input gqlmodel.RemoveMemberFromTeamInput) int + RemoveMyAuth func(childComplexity int, input gqlmodel.RemoveMyAuthInput) int + RemovePropertyField func(childComplexity int, input gqlmodel.RemovePropertyFieldInput) int + RemovePropertyItem func(childComplexity int, input gqlmodel.RemovePropertyItemInput) int + RemoveTag func(childComplexity int, input gqlmodel.RemoveTagInput) int + RemoveWidget func(childComplexity int, input gqlmodel.RemoveWidgetInput) int + Signup func(childComplexity int, input gqlmodel.SignupInput) int + SyncDataset func(childComplexity int, input gqlmodel.SyncDatasetInput) int + 
UninstallPlugin func(childComplexity int, input gqlmodel.UninstallPluginInput) int + UnlinkPropertyValue func(childComplexity int, input gqlmodel.UnlinkPropertyValueInput) int + UpdateCluster func(childComplexity int, input gqlmodel.UpdateClusterInput) int + UpdateDatasetSchema func(childComplexity int, input gqlmodel.UpdateDatasetSchemaInput) int + UpdateLayer func(childComplexity int, input gqlmodel.UpdateLayerInput) int + UpdateMe func(childComplexity int, input gqlmodel.UpdateMeInput) int + UpdateMemberOfTeam func(childComplexity int, input gqlmodel.UpdateMemberOfTeamInput) int + UpdateProject func(childComplexity int, input gqlmodel.UpdateProjectInput) int + UpdatePropertyItems func(childComplexity int, input gqlmodel.UpdatePropertyItemInput) int + UpdatePropertyValue func(childComplexity int, input gqlmodel.UpdatePropertyValueInput) int + UpdateTag func(childComplexity int, input gqlmodel.UpdateTagInput) int + UpdateTeam func(childComplexity int, input gqlmodel.UpdateTeamInput) int + UpdateWidget func(childComplexity int, input gqlmodel.UpdateWidgetInput) int + UpdateWidgetAlignSystem func(childComplexity int, input gqlmodel.UpdateWidgetAlignSystemInput) int + UpgradePlugin func(childComplexity int, input gqlmodel.UpgradePluginInput) int + UploadFileToProperty func(childComplexity int, input gqlmodel.UploadFileToPropertyInput) int + UploadPlugin func(childComplexity int, input gqlmodel.UploadPluginInput) int + } + + PageInfo struct { + EndCursor func(childComplexity int) int + HasNextPage func(childComplexity int) int + HasPreviousPage func(childComplexity int) int + StartCursor func(childComplexity int) int + } + + Plugin struct { + AllTranslatedDescription func(childComplexity int) int + AllTranslatedName func(childComplexity int) int + Author func(childComplexity int) int + Description func(childComplexity int) int + Extensions func(childComplexity int) int + ID func(childComplexity int) int + Name func(childComplexity int) int + PropertySchema 
func(childComplexity int) int + PropertySchemaID func(childComplexity int) int + RepositoryURL func(childComplexity int) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int + ScenePlugin func(childComplexity int, sceneID *gqlmodel.ID) int + TranslatedDescription func(childComplexity int, lang *language.Tag) int + TranslatedName func(childComplexity int, lang *language.Tag) int + Version func(childComplexity int) int + } + + PluginExtension struct { + AllTranslatedDescription func(childComplexity int) int + AllTranslatedName func(childComplexity int) int + Description func(childComplexity int) int + ExtensionID func(childComplexity int) int + Icon func(childComplexity int) int + Name func(childComplexity int) int + Plugin func(childComplexity int) int + PluginID func(childComplexity int) int + PropertySchema func(childComplexity int) int + PropertySchemaID func(childComplexity int) int + SceneWidget func(childComplexity int, sceneID gqlmodel.ID) int + SingleOnly func(childComplexity int) int + TranslatedDescription func(childComplexity int, lang *language.Tag) int + TranslatedName func(childComplexity int, lang *language.Tag) int + Type func(childComplexity int) int + Visualizer func(childComplexity int) int + WidgetLayout func(childComplexity int) int + } + + Project struct { + Alias func(childComplexity int) int + BasicAuthPassword func(childComplexity int) int + BasicAuthUsername func(childComplexity int) int + CreatedAt func(childComplexity int) int + Description func(childComplexity int) int + ID func(childComplexity int) int + ImageURL func(childComplexity int) int + IsArchived func(childComplexity int) int + IsBasicAuthActive func(childComplexity int) int + Name func(childComplexity int) int + PublicDescription func(childComplexity int) int + PublicImage func(childComplexity int) int + PublicNoIndex func(childComplexity int) int + PublicTitle func(childComplexity int) int + PublishedAt func(childComplexity int) int + 
PublishmentStatus func(childComplexity int) int + Scene func(childComplexity int) int + Team func(childComplexity int) int + TeamID func(childComplexity int) int + UpdatedAt func(childComplexity int) int + Visualizer func(childComplexity int) int + } + + ProjectAliasAvailability struct { + Alias func(childComplexity int) int + Available func(childComplexity int) int + } + + ProjectConnection struct { + Edges func(childComplexity int) int + Nodes func(childComplexity int) int + PageInfo func(childComplexity int) int + TotalCount func(childComplexity int) int + } + + ProjectEdge struct { + Cursor func(childComplexity int) int + Node func(childComplexity int) int + } + + ProjectPayload struct { + Project func(childComplexity int) int + } + + Property struct { + ID func(childComplexity int) int + Items func(childComplexity int) int + Layer func(childComplexity int) int + Merged func(childComplexity int) int + Schema func(childComplexity int) int + SchemaID func(childComplexity int) int + } + + PropertyCondition struct { + FieldID func(childComplexity int) int + Type func(childComplexity int) int + Value func(childComplexity int) int + } + + PropertyField struct { + ActualValue func(childComplexity int) int + Field func(childComplexity int) int + FieldID func(childComplexity int) int + ID func(childComplexity int) int + Links func(childComplexity int) int + Parent func(childComplexity int) int + ParentID func(childComplexity int) int + Schema func(childComplexity int) int + SchemaID func(childComplexity int) int + Type func(childComplexity int) int + Value func(childComplexity int) int + } + + PropertyFieldLink struct { + Dataset func(childComplexity int) int + DatasetField func(childComplexity int) int + DatasetID func(childComplexity int) int + DatasetSchema func(childComplexity int) int + DatasetSchemaField func(childComplexity int) int + DatasetSchemaFieldID func(childComplexity int) int + DatasetSchemaID func(childComplexity int) int + } + + PropertyFieldPayload 
struct { + Property func(childComplexity int) int + PropertyField func(childComplexity int) int + } + + PropertyGroup struct { + Fields func(childComplexity int) int + ID func(childComplexity int) int + Schema func(childComplexity int) int + SchemaGroup func(childComplexity int) int + SchemaGroupID func(childComplexity int) int + SchemaID func(childComplexity int) int + } + + PropertyGroupList struct { + Groups func(childComplexity int) int + ID func(childComplexity int) int + Schema func(childComplexity int) int + SchemaGroup func(childComplexity int) int + SchemaGroupID func(childComplexity int) int + SchemaID func(childComplexity int) int + } + + PropertyItemPayload struct { + Property func(childComplexity int) int + PropertyItem func(childComplexity int) int + } + + PropertyLinkableFields struct { + Latlng func(childComplexity int) int + LatlngField func(childComplexity int) int + Schema func(childComplexity int) int + SchemaID func(childComplexity int) int + URL func(childComplexity int) int + URLField func(childComplexity int) int + } + + PropertySchema struct { + Groups func(childComplexity int) int + ID func(childComplexity int) int + LinkableFields func(childComplexity int) int + } + + PropertySchemaField struct { + AllTranslatedDescription func(childComplexity int) int + AllTranslatedTitle func(childComplexity int) int + Choices func(childComplexity int) int + DefaultValue func(childComplexity int) int + Description func(childComplexity int) int + FieldID func(childComplexity int) int + IsAvailableIf func(childComplexity int) int + Max func(childComplexity int) int + Min func(childComplexity int) int + Prefix func(childComplexity int) int + Suffix func(childComplexity int) int + Title func(childComplexity int) int + TranslatedDescription func(childComplexity int, lang *language.Tag) int + TranslatedTitle func(childComplexity int, lang *language.Tag) int + Type func(childComplexity int) int + UI func(childComplexity int) int + } + + 
PropertySchemaFieldChoice struct { + AllTranslatedTitle func(childComplexity int) int + Icon func(childComplexity int) int + Key func(childComplexity int) int + Title func(childComplexity int) int + TranslatedTitle func(childComplexity int, lang *language.Tag) int + } + + PropertySchemaGroup struct { + AllTranslatedTitle func(childComplexity int) int + Fields func(childComplexity int) int + IsAvailableIf func(childComplexity int) int + IsList func(childComplexity int) int + RepresentativeField func(childComplexity int) int + RepresentativeFieldID func(childComplexity int) int + Schema func(childComplexity int) int + SchemaGroupID func(childComplexity int) int + SchemaID func(childComplexity int) int + Title func(childComplexity int) int + TranslatedTitle func(childComplexity int, lang *language.Tag) int + } + + Query struct { + Assets func(childComplexity int, teamID gqlmodel.ID, keyword *string, sort *gqlmodel.AssetSortType, pagination *gqlmodel.Pagination) int + CheckProjectAlias func(childComplexity int, alias string) int + DatasetSchemas func(childComplexity int, sceneID gqlmodel.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + Datasets func(childComplexity int, datasetSchemaID gqlmodel.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + DynamicDatasetSchemas func(childComplexity int, sceneID gqlmodel.ID) int + Layer func(childComplexity int, id gqlmodel.ID) int + Me func(childComplexity int) int + Node func(childComplexity int, id gqlmodel.ID, typeArg gqlmodel.NodeType) int + Nodes func(childComplexity int, id []gqlmodel.ID, typeArg gqlmodel.NodeType) int + Plugin func(childComplexity int, id gqlmodel.ID) int + Plugins func(childComplexity int, id []gqlmodel.ID) int + Projects func(childComplexity int, teamID gqlmodel.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + PropertySchema func(childComplexity int, id gqlmodel.ID) int + PropertySchemas 
func(childComplexity int, id []gqlmodel.ID) int + Scene func(childComplexity int, projectID gqlmodel.ID) int + SearchUser func(childComplexity int, nameOrEmail string) int + } + + Rect struct { + East func(childComplexity int) int + North func(childComplexity int) int + South func(childComplexity int) int + West func(childComplexity int) int + } + + RemoveAssetPayload struct { + AssetID func(childComplexity int) int + } + + RemoveClusterPayload struct { + ClusterID func(childComplexity int) int + Scene func(childComplexity int) int + } + + RemoveDatasetSchemaPayload struct { + SchemaID func(childComplexity int) int + } + + RemoveInfoboxFieldPayload struct { + InfoboxFieldID func(childComplexity int) int + Layer func(childComplexity int) int + } + + RemoveInfoboxPayload struct { + Layer func(childComplexity int) int + } + + RemoveLayerPayload struct { + LayerID func(childComplexity int) int + ParentLayer func(childComplexity int) int + } + + RemoveMemberFromTeamPayload struct { + Team func(childComplexity int) int + } + + RemoveTagPayload struct { + TagID func(childComplexity int) int + UpdatedLayers func(childComplexity int) int + } + + RemoveWidgetPayload struct { + Scene func(childComplexity int) int + WidgetID func(childComplexity int) int + } + + Scene struct { + Clusters func(childComplexity int) int + CreatedAt func(childComplexity int) int + DatasetSchemas func(childComplexity int, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + DynamicDatasetSchemas func(childComplexity int) int + ID func(childComplexity int) int + Plugins func(childComplexity int) int + Project func(childComplexity int) int + ProjectID func(childComplexity int) int + Property func(childComplexity int) int + PropertyID func(childComplexity int) int + RootLayer func(childComplexity int) int + RootLayerID func(childComplexity int) int + TagIds func(childComplexity int) int + Tags func(childComplexity int) int + Team func(childComplexity int) int + TeamID 
func(childComplexity int) int + UpdatedAt func(childComplexity int) int + WidgetAlignSystem func(childComplexity int) int + Widgets func(childComplexity int) int + } + + ScenePlugin struct { + Plugin func(childComplexity int) int + PluginID func(childComplexity int) int + Property func(childComplexity int) int + PropertyID func(childComplexity int) int + } + + SceneWidget struct { + Enabled func(childComplexity int) int + Extended func(childComplexity int) int + Extension func(childComplexity int) int + ExtensionID func(childComplexity int) int + ID func(childComplexity int) int + Plugin func(childComplexity int) int + PluginID func(childComplexity int) int + Property func(childComplexity int) int + PropertyID func(childComplexity int) int + } + + SignupPayload struct { + Team func(childComplexity int) int + User func(childComplexity int) int + } + + SyncDatasetPayload struct { + Dataset func(childComplexity int) int + DatasetSchema func(childComplexity int) int + SceneID func(childComplexity int) int + URL func(childComplexity int) int + } + + TagGroup struct { + ID func(childComplexity int) int + Label func(childComplexity int) int + Layers func(childComplexity int) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int + TagIds func(childComplexity int) int + Tags func(childComplexity int) int + } + + TagItem struct { + ID func(childComplexity int) int + Label func(childComplexity int) int + Layers func(childComplexity int) int + LinkedDataset func(childComplexity int) int + LinkedDatasetField func(childComplexity int) int + LinkedDatasetFieldID func(childComplexity int) int + LinkedDatasetID func(childComplexity int) int + LinkedDatasetSchema func(childComplexity int) int + LinkedDatasetSchemaID func(childComplexity int) int + Parent func(childComplexity int) int + ParentID func(childComplexity int) int + SceneID func(childComplexity int) int + } + + Team struct { + Assets func(childComplexity int, first *int, last *int, after 
*usecase.Cursor, before *usecase.Cursor) int + ID func(childComplexity int) int + Members func(childComplexity int) int + Name func(childComplexity int) int + Personal func(childComplexity int) int + Projects func(childComplexity int, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + } + + TeamMember struct { + Role func(childComplexity int) int + User func(childComplexity int) int + UserID func(childComplexity int) int + } + + Typography struct { + Bold func(childComplexity int) int + Color func(childComplexity int) int + FontFamily func(childComplexity int) int + FontSize func(childComplexity int) int + FontWeight func(childComplexity int) int + Italic func(childComplexity int) int + TextAlign func(childComplexity int) int + Underline func(childComplexity int) int + } + + UninstallPluginPayload struct { + PluginID func(childComplexity int) int + Scene func(childComplexity int) int + } + + UpdateClusterPayload struct { + Cluster func(childComplexity int) int + Scene func(childComplexity int) int + } + + UpdateDatasetSchemaPayload struct { + DatasetSchema func(childComplexity int) int + } + + UpdateLayerPayload struct { + Layer func(childComplexity int) int + } + + UpdateMePayload struct { + Me func(childComplexity int) int + } + + UpdateMemberOfTeamPayload struct { + Team func(childComplexity int) int + } + + UpdateTagPayload struct { + Tag func(childComplexity int) int + } + + UpdateTeamPayload struct { + Team func(childComplexity int) int + } + + UpdateWidgetAlignSystemPayload struct { + Scene func(childComplexity int) int + } + + UpdateWidgetPayload struct { + Scene func(childComplexity int) int + SceneWidget func(childComplexity int) int + } + + UpgradePluginPayload struct { + Scene func(childComplexity int) int + ScenePlugin func(childComplexity int) int + } + + UploadPluginPayload struct { + Plugin func(childComplexity int) int + Scene func(childComplexity int) int + ScenePlugin func(childComplexity int) int + 
} + + User struct { + Email func(childComplexity int) int + ID func(childComplexity int) int + Name func(childComplexity int) int + } + + WidgetAlignSystem struct { + Inner func(childComplexity int) int + Outer func(childComplexity int) int + } + + WidgetArea struct { + Align func(childComplexity int) int + WidgetIds func(childComplexity int) int + } + + WidgetExtendable struct { + Horizontally func(childComplexity int) int + Vertically func(childComplexity int) int + } + + WidgetLayout struct { + DefaultLocation func(childComplexity int) int + Extendable func(childComplexity int) int + Extended func(childComplexity int) int + Floating func(childComplexity int) int + } + + WidgetLocation struct { + Area func(childComplexity int) int + Section func(childComplexity int) int + Zone func(childComplexity int) int + } + + WidgetSection struct { + Bottom func(childComplexity int) int + Middle func(childComplexity int) int + Top func(childComplexity int) int + } + + WidgetZone struct { + Center func(childComplexity int) int + Left func(childComplexity int) int + Right func(childComplexity int) int + } +} + +type AssetResolver interface { + Team(ctx context.Context, obj *gqlmodel.Asset) (*gqlmodel.Team, error) +} +type ClusterResolver interface { + Property(ctx context.Context, obj *gqlmodel.Cluster) (*gqlmodel.Property, error) +} +type DatasetResolver interface { + Schema(ctx context.Context, obj *gqlmodel.Dataset) (*gqlmodel.DatasetSchema, error) + Name(ctx context.Context, obj *gqlmodel.Dataset) (*string, error) +} +type DatasetFieldResolver interface { + Schema(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.DatasetSchema, error) + Field(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.DatasetSchemaField, error) + ValueRef(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.Dataset, error) +} +type DatasetSchemaResolver interface { + TotalCount(ctx context.Context, obj *gqlmodel.DatasetSchema) (int, error) + + Datasets(ctx context.Context, 
obj *gqlmodel.DatasetSchema, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetConnection, error) + Scene(ctx context.Context, obj *gqlmodel.DatasetSchema) (*gqlmodel.Scene, error) + RepresentativeField(ctx context.Context, obj *gqlmodel.DatasetSchema) (*gqlmodel.DatasetSchemaField, error) +} +type DatasetSchemaFieldResolver interface { + Schema(ctx context.Context, obj *gqlmodel.DatasetSchemaField) (*gqlmodel.DatasetSchema, error) + Ref(ctx context.Context, obj *gqlmodel.DatasetSchemaField) (*gqlmodel.DatasetSchema, error) +} +type InfoboxResolver interface { + Layer(ctx context.Context, obj *gqlmodel.Infobox) (gqlmodel.Layer, error) + Property(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Property, error) + LinkedDataset(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Dataset, error) + Merged(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.MergedInfobox, error) + Scene(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Scene, error) +} +type InfoboxFieldResolver interface { + Layer(ctx context.Context, obj *gqlmodel.InfoboxField) (gqlmodel.Layer, error) + Infobox(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Infobox, error) + Property(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Property, error) + Plugin(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Plugin, error) + Extension(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.PluginExtension, error) + LinkedDataset(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Dataset, error) + Merged(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.MergedInfoboxField, error) + Scene(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Scene, error) + ScenePlugin(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.ScenePlugin, error) +} +type LayerGroupResolver interface { + Parent(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.LayerGroup, error) + Property(ctx 
context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Property, error) + Plugin(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Plugin, error) + Extension(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.PluginExtension, error) + LinkedDatasetSchema(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.DatasetSchema, error) + Layers(ctx context.Context, obj *gqlmodel.LayerGroup) ([]gqlmodel.Layer, error) + Scene(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Scene, error) + ScenePlugin(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.ScenePlugin, error) +} +type LayerItemResolver interface { + Parent(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.LayerGroup, error) + Property(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Property, error) + Plugin(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Plugin, error) + Extension(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.PluginExtension, error) + LinkedDataset(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Dataset, error) + Merged(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.MergedLayer, error) + Scene(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Scene, error) + ScenePlugin(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.ScenePlugin, error) +} +type LayerTagGroupResolver interface { + Tag(ctx context.Context, obj *gqlmodel.LayerTagGroup) (gqlmodel.Tag, error) +} +type LayerTagItemResolver interface { + Tag(ctx context.Context, obj *gqlmodel.LayerTagItem) (gqlmodel.Tag, error) +} +type MeResolver interface { + Teams(ctx context.Context, obj *gqlmodel.Me) ([]*gqlmodel.Team, error) + MyTeam(ctx context.Context, obj *gqlmodel.Me) (*gqlmodel.Team, error) +} +type MergedInfoboxResolver interface { + Scene(ctx context.Context, obj *gqlmodel.MergedInfobox) (*gqlmodel.Scene, error) +} +type MergedInfoboxFieldResolver interface { + Plugin(ctx context.Context, obj *gqlmodel.MergedInfoboxField) 
(*gqlmodel.Plugin, error) + Extension(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.PluginExtension, error) + Scene(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.Scene, error) + ScenePlugin(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.ScenePlugin, error) +} +type MergedLayerResolver interface { + Original(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.LayerItem, error) + Parent(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.LayerGroup, error) + Scene(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.Scene, error) +} +type MergedPropertyResolver interface { + Original(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.Property, error) + Parent(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.Property, error) + Schema(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.PropertySchema, error) + LinkedDataset(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.Dataset, error) + Groups(ctx context.Context, obj *gqlmodel.MergedProperty) ([]*gqlmodel.MergedPropertyGroup, error) +} +type MergedPropertyFieldResolver interface { + Schema(ctx context.Context, obj *gqlmodel.MergedPropertyField) (*gqlmodel.PropertySchema, error) + Field(ctx context.Context, obj *gqlmodel.MergedPropertyField) (*gqlmodel.PropertySchemaField, error) + ActualValue(ctx context.Context, obj *gqlmodel.MergedPropertyField) (interface{}, error) +} +type MergedPropertyGroupResolver interface { + OriginalProperty(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.Property, error) + ParentProperty(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.Property, error) + Original(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.PropertyGroup, error) + Parent(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.PropertyGroup, error) + Schema(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) 
(*gqlmodel.PropertySchema, error) + LinkedDataset(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.Dataset, error) +} +type MutationResolver interface { + CreateAsset(ctx context.Context, input gqlmodel.CreateAssetInput) (*gqlmodel.CreateAssetPayload, error) + RemoveAsset(ctx context.Context, input gqlmodel.RemoveAssetInput) (*gqlmodel.RemoveAssetPayload, error) + Signup(ctx context.Context, input gqlmodel.SignupInput) (*gqlmodel.SignupPayload, error) + UpdateMe(ctx context.Context, input gqlmodel.UpdateMeInput) (*gqlmodel.UpdateMePayload, error) + RemoveMyAuth(ctx context.Context, input gqlmodel.RemoveMyAuthInput) (*gqlmodel.UpdateMePayload, error) + DeleteMe(ctx context.Context, input gqlmodel.DeleteMeInput) (*gqlmodel.DeleteMePayload, error) + CreateTeam(ctx context.Context, input gqlmodel.CreateTeamInput) (*gqlmodel.CreateTeamPayload, error) + DeleteTeam(ctx context.Context, input gqlmodel.DeleteTeamInput) (*gqlmodel.DeleteTeamPayload, error) + UpdateTeam(ctx context.Context, input gqlmodel.UpdateTeamInput) (*gqlmodel.UpdateTeamPayload, error) + AddMemberToTeam(ctx context.Context, input gqlmodel.AddMemberToTeamInput) (*gqlmodel.AddMemberToTeamPayload, error) + RemoveMemberFromTeam(ctx context.Context, input gqlmodel.RemoveMemberFromTeamInput) (*gqlmodel.RemoveMemberFromTeamPayload, error) + UpdateMemberOfTeam(ctx context.Context, input gqlmodel.UpdateMemberOfTeamInput) (*gqlmodel.UpdateMemberOfTeamPayload, error) + CreateProject(ctx context.Context, input gqlmodel.CreateProjectInput) (*gqlmodel.ProjectPayload, error) + UpdateProject(ctx context.Context, input gqlmodel.UpdateProjectInput) (*gqlmodel.ProjectPayload, error) + PublishProject(ctx context.Context, input gqlmodel.PublishProjectInput) (*gqlmodel.ProjectPayload, error) + DeleteProject(ctx context.Context, input gqlmodel.DeleteProjectInput) (*gqlmodel.DeleteProjectPayload, error) + CreateScene(ctx context.Context, input gqlmodel.CreateSceneInput) (*gqlmodel.CreateScenePayload, error) + 
AddWidget(ctx context.Context, input gqlmodel.AddWidgetInput) (*gqlmodel.AddWidgetPayload, error) + UpdateWidget(ctx context.Context, input gqlmodel.UpdateWidgetInput) (*gqlmodel.UpdateWidgetPayload, error) + UpdateWidgetAlignSystem(ctx context.Context, input gqlmodel.UpdateWidgetAlignSystemInput) (*gqlmodel.UpdateWidgetAlignSystemPayload, error) + RemoveWidget(ctx context.Context, input gqlmodel.RemoveWidgetInput) (*gqlmodel.RemoveWidgetPayload, error) + InstallPlugin(ctx context.Context, input gqlmodel.InstallPluginInput) (*gqlmodel.InstallPluginPayload, error) + UninstallPlugin(ctx context.Context, input gqlmodel.UninstallPluginInput) (*gqlmodel.UninstallPluginPayload, error) + UploadPlugin(ctx context.Context, input gqlmodel.UploadPluginInput) (*gqlmodel.UploadPluginPayload, error) + UpgradePlugin(ctx context.Context, input gqlmodel.UpgradePluginInput) (*gqlmodel.UpgradePluginPayload, error) + AddCluster(ctx context.Context, input gqlmodel.AddClusterInput) (*gqlmodel.AddClusterPayload, error) + UpdateCluster(ctx context.Context, input gqlmodel.UpdateClusterInput) (*gqlmodel.UpdateClusterPayload, error) + RemoveCluster(ctx context.Context, input gqlmodel.RemoveClusterInput) (*gqlmodel.RemoveClusterPayload, error) + UpdateDatasetSchema(ctx context.Context, input gqlmodel.UpdateDatasetSchemaInput) (*gqlmodel.UpdateDatasetSchemaPayload, error) + SyncDataset(ctx context.Context, input gqlmodel.SyncDatasetInput) (*gqlmodel.SyncDatasetPayload, error) + AddDynamicDatasetSchema(ctx context.Context, input gqlmodel.AddDynamicDatasetSchemaInput) (*gqlmodel.AddDynamicDatasetSchemaPayload, error) + AddDynamicDataset(ctx context.Context, input gqlmodel.AddDynamicDatasetInput) (*gqlmodel.AddDynamicDatasetPayload, error) + RemoveDatasetSchema(ctx context.Context, input gqlmodel.RemoveDatasetSchemaInput) (*gqlmodel.RemoveDatasetSchemaPayload, error) + ImportDataset(ctx context.Context, input gqlmodel.ImportDatasetInput) (*gqlmodel.ImportDatasetPayload, error) + 
ImportDatasetFromGoogleSheet(ctx context.Context, input gqlmodel.ImportDatasetFromGoogleSheetInput) (*gqlmodel.ImportDatasetPayload, error) + AddDatasetSchema(ctx context.Context, input gqlmodel.AddDatasetSchemaInput) (*gqlmodel.AddDatasetSchemaPayload, error) + UpdatePropertyValue(ctx context.Context, input gqlmodel.UpdatePropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) + RemovePropertyField(ctx context.Context, input gqlmodel.RemovePropertyFieldInput) (*gqlmodel.PropertyFieldPayload, error) + UploadFileToProperty(ctx context.Context, input gqlmodel.UploadFileToPropertyInput) (*gqlmodel.PropertyFieldPayload, error) + LinkDatasetToPropertyValue(ctx context.Context, input gqlmodel.LinkDatasetToPropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) + UnlinkPropertyValue(ctx context.Context, input gqlmodel.UnlinkPropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) + AddPropertyItem(ctx context.Context, input gqlmodel.AddPropertyItemInput) (*gqlmodel.PropertyItemPayload, error) + MovePropertyItem(ctx context.Context, input gqlmodel.MovePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) + RemovePropertyItem(ctx context.Context, input gqlmodel.RemovePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) + UpdatePropertyItems(ctx context.Context, input gqlmodel.UpdatePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) + AddLayerItem(ctx context.Context, input gqlmodel.AddLayerItemInput) (*gqlmodel.AddLayerItemPayload, error) + AddLayerGroup(ctx context.Context, input gqlmodel.AddLayerGroupInput) (*gqlmodel.AddLayerGroupPayload, error) + RemoveLayer(ctx context.Context, input gqlmodel.RemoveLayerInput) (*gqlmodel.RemoveLayerPayload, error) + UpdateLayer(ctx context.Context, input gqlmodel.UpdateLayerInput) (*gqlmodel.UpdateLayerPayload, error) + MoveLayer(ctx context.Context, input gqlmodel.MoveLayerInput) (*gqlmodel.MoveLayerPayload, error) + CreateInfobox(ctx context.Context, input gqlmodel.CreateInfoboxInput) 
(*gqlmodel.CreateInfoboxPayload, error) + RemoveInfobox(ctx context.Context, input gqlmodel.RemoveInfoboxInput) (*gqlmodel.RemoveInfoboxPayload, error) + AddInfoboxField(ctx context.Context, input gqlmodel.AddInfoboxFieldInput) (*gqlmodel.AddInfoboxFieldPayload, error) + MoveInfoboxField(ctx context.Context, input gqlmodel.MoveInfoboxFieldInput) (*gqlmodel.MoveInfoboxFieldPayload, error) + RemoveInfoboxField(ctx context.Context, input gqlmodel.RemoveInfoboxFieldInput) (*gqlmodel.RemoveInfoboxFieldPayload, error) + ImportLayer(ctx context.Context, input gqlmodel.ImportLayerInput) (*gqlmodel.ImportLayerPayload, error) + AttachTagToLayer(ctx context.Context, input gqlmodel.AttachTagToLayerInput) (*gqlmodel.AttachTagToLayerPayload, error) + DetachTagFromLayer(ctx context.Context, input gqlmodel.DetachTagFromLayerInput) (*gqlmodel.DetachTagFromLayerPayload, error) + CreateTagItem(ctx context.Context, input gqlmodel.CreateTagItemInput) (*gqlmodel.CreateTagItemPayload, error) + CreateTagGroup(ctx context.Context, input gqlmodel.CreateTagGroupInput) (*gqlmodel.CreateTagGroupPayload, error) + AttachTagItemToGroup(ctx context.Context, input gqlmodel.AttachTagItemToGroupInput) (*gqlmodel.AttachTagItemToGroupPayload, error) + DetachTagItemFromGroup(ctx context.Context, input gqlmodel.DetachTagItemFromGroupInput) (*gqlmodel.DetachTagItemFromGroupPayload, error) + UpdateTag(ctx context.Context, input gqlmodel.UpdateTagInput) (*gqlmodel.UpdateTagPayload, error) + RemoveTag(ctx context.Context, input gqlmodel.RemoveTagInput) (*gqlmodel.RemoveTagPayload, error) +} +type PluginResolver interface { + Scene(ctx context.Context, obj *gqlmodel.Plugin) (*gqlmodel.Scene, error) + TranslatedName(ctx context.Context, obj *gqlmodel.Plugin, lang *language.Tag) (string, error) + TranslatedDescription(ctx context.Context, obj *gqlmodel.Plugin, lang *language.Tag) (string, error) + PropertySchema(ctx context.Context, obj *gqlmodel.Plugin) (*gqlmodel.PropertySchema, error) +} +type 
PluginExtensionResolver interface { + Plugin(ctx context.Context, obj *gqlmodel.PluginExtension) (*gqlmodel.Plugin, error) + SceneWidget(ctx context.Context, obj *gqlmodel.PluginExtension, sceneID gqlmodel.ID) (*gqlmodel.SceneWidget, error) + PropertySchema(ctx context.Context, obj *gqlmodel.PluginExtension) (*gqlmodel.PropertySchema, error) + TranslatedName(ctx context.Context, obj *gqlmodel.PluginExtension, lang *language.Tag) (string, error) + TranslatedDescription(ctx context.Context, obj *gqlmodel.PluginExtension, lang *language.Tag) (string, error) +} +type ProjectResolver interface { + Team(ctx context.Context, obj *gqlmodel.Project) (*gqlmodel.Team, error) + Scene(ctx context.Context, obj *gqlmodel.Project) (*gqlmodel.Scene, error) +} +type PropertyResolver interface { + Schema(ctx context.Context, obj *gqlmodel.Property) (*gqlmodel.PropertySchema, error) + Layer(ctx context.Context, obj *gqlmodel.Property) (gqlmodel.Layer, error) + Merged(ctx context.Context, obj *gqlmodel.Property) (*gqlmodel.MergedProperty, error) +} +type PropertyFieldResolver interface { + Parent(ctx context.Context, obj *gqlmodel.PropertyField) (*gqlmodel.Property, error) + Schema(ctx context.Context, obj *gqlmodel.PropertyField) (*gqlmodel.PropertySchema, error) + Field(ctx context.Context, obj *gqlmodel.PropertyField) (*gqlmodel.PropertySchemaField, error) + ActualValue(ctx context.Context, obj *gqlmodel.PropertyField) (interface{}, error) +} +type PropertyFieldLinkResolver interface { + Dataset(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.Dataset, error) + DatasetField(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetField, error) + DatasetSchema(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetSchema, error) + DatasetSchemaField(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetSchemaField, error) +} +type PropertyGroupResolver interface { + Schema(ctx context.Context, obj 
*gqlmodel.PropertyGroup) (*gqlmodel.PropertySchema, error) + SchemaGroup(ctx context.Context, obj *gqlmodel.PropertyGroup) (*gqlmodel.PropertySchemaGroup, error) +} +type PropertyGroupListResolver interface { + Schema(ctx context.Context, obj *gqlmodel.PropertyGroupList) (*gqlmodel.PropertySchema, error) + SchemaGroup(ctx context.Context, obj *gqlmodel.PropertyGroupList) (*gqlmodel.PropertySchemaGroup, error) +} +type PropertyLinkableFieldsResolver interface { + LatlngField(ctx context.Context, obj *gqlmodel.PropertyLinkableFields) (*gqlmodel.PropertySchemaField, error) + URLField(ctx context.Context, obj *gqlmodel.PropertyLinkableFields) (*gqlmodel.PropertySchemaField, error) + Schema(ctx context.Context, obj *gqlmodel.PropertyLinkableFields) (*gqlmodel.PropertySchema, error) +} +type PropertySchemaFieldResolver interface { + TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *language.Tag) (string, error) + TranslatedDescription(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *language.Tag) (string, error) +} +type PropertySchemaFieldChoiceResolver interface { + TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaFieldChoice, lang *language.Tag) (string, error) +} +type PropertySchemaGroupResolver interface { + Schema(ctx context.Context, obj *gqlmodel.PropertySchemaGroup) (*gqlmodel.PropertySchema, error) + TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaGroup, lang *language.Tag) (string, error) +} +type QueryResolver interface { + Me(ctx context.Context) (*gqlmodel.Me, error) + Node(ctx context.Context, id gqlmodel.ID, typeArg gqlmodel.NodeType) (gqlmodel.Node, error) + Nodes(ctx context.Context, id []gqlmodel.ID, typeArg gqlmodel.NodeType) ([]gqlmodel.Node, error) + PropertySchema(ctx context.Context, id gqlmodel.ID) (*gqlmodel.PropertySchema, error) + PropertySchemas(ctx context.Context, id []gqlmodel.ID) ([]*gqlmodel.PropertySchema, error) + Plugin(ctx context.Context, id gqlmodel.ID) 
(*gqlmodel.Plugin, error) + Plugins(ctx context.Context, id []gqlmodel.ID) ([]*gqlmodel.Plugin, error) + Layer(ctx context.Context, id gqlmodel.ID) (gqlmodel.Layer, error) + Scene(ctx context.Context, projectID gqlmodel.ID) (*gqlmodel.Scene, error) + Assets(ctx context.Context, teamID gqlmodel.ID, keyword *string, sort *gqlmodel.AssetSortType, pagination *gqlmodel.Pagination) (*gqlmodel.AssetConnection, error) + Projects(ctx context.Context, teamID gqlmodel.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) + DatasetSchemas(ctx context.Context, sceneID gqlmodel.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) + Datasets(ctx context.Context, datasetSchemaID gqlmodel.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetConnection, error) + DynamicDatasetSchemas(ctx context.Context, sceneID gqlmodel.ID) ([]*gqlmodel.DatasetSchema, error) + SearchUser(ctx context.Context, nameOrEmail string) (*gqlmodel.User, error) + CheckProjectAlias(ctx context.Context, alias string) (*gqlmodel.ProjectAliasAvailability, error) +} +type SceneResolver interface { + Project(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Project, error) + Team(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Team, error) + Property(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Property, error) + RootLayer(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.LayerGroup, error) + DatasetSchemas(ctx context.Context, obj *gqlmodel.Scene, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) + + Tags(ctx context.Context, obj *gqlmodel.Scene) ([]gqlmodel.Tag, error) +} +type ScenePluginResolver interface { + Plugin(ctx context.Context, obj *gqlmodel.ScenePlugin) (*gqlmodel.Plugin, error) + Property(ctx context.Context, obj *gqlmodel.ScenePlugin) 
(*gqlmodel.Property, error) +} +type SceneWidgetResolver interface { + Plugin(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.Plugin, error) + Extension(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.PluginExtension, error) + Property(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.Property, error) +} +type TagGroupResolver interface { + Tags(ctx context.Context, obj *gqlmodel.TagGroup) ([]*gqlmodel.TagItem, error) + Scene(ctx context.Context, obj *gqlmodel.TagGroup) (*gqlmodel.Scene, error) + Layers(ctx context.Context, obj *gqlmodel.TagGroup) ([]gqlmodel.Layer, error) +} +type TagItemResolver interface { + LinkedDatasetSchema(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.DatasetSchema, error) + LinkedDataset(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.Dataset, error) + LinkedDatasetField(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.DatasetField, error) + Parent(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.TagGroup, error) + Layers(ctx context.Context, obj *gqlmodel.TagItem) ([]gqlmodel.Layer, error) +} +type TeamResolver interface { + Assets(ctx context.Context, obj *gqlmodel.Team, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.AssetConnection, error) + Projects(ctx context.Context, obj *gqlmodel.Team, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) +} +type TeamMemberResolver interface { + User(ctx context.Context, obj *gqlmodel.TeamMember) (*gqlmodel.User, error) +} + +type executableSchema struct { + resolvers ResolverRoot + directives DirectiveRoot + complexity ComplexityRoot +} + +func (e *executableSchema) Schema() *ast.Schema { + return parsedSchema +} + +func (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) { + ec := executionContext{nil, e} + _ = ec + switch typeName + "." 
+ field { + + case "AddClusterPayload.cluster": + if e.complexity.AddClusterPayload.Cluster == nil { + break + } + + return e.complexity.AddClusterPayload.Cluster(childComplexity), true + + case "AddClusterPayload.scene": + if e.complexity.AddClusterPayload.Scene == nil { + break + } + + return e.complexity.AddClusterPayload.Scene(childComplexity), true + + case "AddDatasetSchemaPayload.datasetSchema": + if e.complexity.AddDatasetSchemaPayload.DatasetSchema == nil { + break + } + + return e.complexity.AddDatasetSchemaPayload.DatasetSchema(childComplexity), true + + case "AddDynamicDatasetPayload.dataset": + if e.complexity.AddDynamicDatasetPayload.Dataset == nil { + break + } + + return e.complexity.AddDynamicDatasetPayload.Dataset(childComplexity), true + + case "AddDynamicDatasetPayload.datasetSchema": + if e.complexity.AddDynamicDatasetPayload.DatasetSchema == nil { + break + } + + return e.complexity.AddDynamicDatasetPayload.DatasetSchema(childComplexity), true + + case "AddDynamicDatasetSchemaPayload.datasetSchema": + if e.complexity.AddDynamicDatasetSchemaPayload.DatasetSchema == nil { + break + } + + return e.complexity.AddDynamicDatasetSchemaPayload.DatasetSchema(childComplexity), true + + case "AddInfoboxFieldPayload.infoboxField": + if e.complexity.AddInfoboxFieldPayload.InfoboxField == nil { + break + } + + return e.complexity.AddInfoboxFieldPayload.InfoboxField(childComplexity), true + + case "AddInfoboxFieldPayload.layer": + if e.complexity.AddInfoboxFieldPayload.Layer == nil { + break + } + + return e.complexity.AddInfoboxFieldPayload.Layer(childComplexity), true + + case "AddLayerGroupPayload.index": + if e.complexity.AddLayerGroupPayload.Index == nil { + break + } + + return e.complexity.AddLayerGroupPayload.Index(childComplexity), true + + case "AddLayerGroupPayload.layer": + if e.complexity.AddLayerGroupPayload.Layer == nil { + break + } + + return e.complexity.AddLayerGroupPayload.Layer(childComplexity), true + + case 
"AddLayerGroupPayload.parentLayer": + if e.complexity.AddLayerGroupPayload.ParentLayer == nil { + break + } + + return e.complexity.AddLayerGroupPayload.ParentLayer(childComplexity), true + + case "AddLayerItemPayload.index": + if e.complexity.AddLayerItemPayload.Index == nil { + break + } + + return e.complexity.AddLayerItemPayload.Index(childComplexity), true + + case "AddLayerItemPayload.layer": + if e.complexity.AddLayerItemPayload.Layer == nil { + break + } + + return e.complexity.AddLayerItemPayload.Layer(childComplexity), true + + case "AddLayerItemPayload.parentLayer": + if e.complexity.AddLayerItemPayload.ParentLayer == nil { + break + } + + return e.complexity.AddLayerItemPayload.ParentLayer(childComplexity), true + + case "AddMemberToTeamPayload.team": + if e.complexity.AddMemberToTeamPayload.Team == nil { + break + } + + return e.complexity.AddMemberToTeamPayload.Team(childComplexity), true + + case "AddWidgetPayload.scene": + if e.complexity.AddWidgetPayload.Scene == nil { + break + } + + return e.complexity.AddWidgetPayload.Scene(childComplexity), true + + case "AddWidgetPayload.sceneWidget": + if e.complexity.AddWidgetPayload.SceneWidget == nil { + break + } + + return e.complexity.AddWidgetPayload.SceneWidget(childComplexity), true + + case "Asset.contentType": + if e.complexity.Asset.ContentType == nil { + break + } + + return e.complexity.Asset.ContentType(childComplexity), true + + case "Asset.createdAt": + if e.complexity.Asset.CreatedAt == nil { + break + } + + return e.complexity.Asset.CreatedAt(childComplexity), true + + case "Asset.id": + if e.complexity.Asset.ID == nil { + break + } + + return e.complexity.Asset.ID(childComplexity), true + + case "Asset.name": + if e.complexity.Asset.Name == nil { + break + } + + return e.complexity.Asset.Name(childComplexity), true + + case "Asset.size": + if e.complexity.Asset.Size == nil { + break + } + + return e.complexity.Asset.Size(childComplexity), true + + case "Asset.team": + if 
e.complexity.Asset.Team == nil { + break + } + + return e.complexity.Asset.Team(childComplexity), true + + case "Asset.teamId": + if e.complexity.Asset.TeamID == nil { + break + } + + return e.complexity.Asset.TeamID(childComplexity), true + + case "Asset.url": + if e.complexity.Asset.URL == nil { + break + } + + return e.complexity.Asset.URL(childComplexity), true + + case "AssetConnection.edges": + if e.complexity.AssetConnection.Edges == nil { + break + } + + return e.complexity.AssetConnection.Edges(childComplexity), true + + case "AssetConnection.nodes": + if e.complexity.AssetConnection.Nodes == nil { + break + } + + return e.complexity.AssetConnection.Nodes(childComplexity), true + + case "AssetConnection.pageInfo": + if e.complexity.AssetConnection.PageInfo == nil { + break + } + + return e.complexity.AssetConnection.PageInfo(childComplexity), true + + case "AssetConnection.totalCount": + if e.complexity.AssetConnection.TotalCount == nil { + break + } + + return e.complexity.AssetConnection.TotalCount(childComplexity), true + + case "AssetEdge.cursor": + if e.complexity.AssetEdge.Cursor == nil { + break + } + + return e.complexity.AssetEdge.Cursor(childComplexity), true + + case "AssetEdge.node": + if e.complexity.AssetEdge.Node == nil { + break + } + + return e.complexity.AssetEdge.Node(childComplexity), true + + case "AttachTagItemToGroupPayload.tag": + if e.complexity.AttachTagItemToGroupPayload.Tag == nil { + break + } + + return e.complexity.AttachTagItemToGroupPayload.Tag(childComplexity), true + + case "AttachTagToLayerPayload.layer": + if e.complexity.AttachTagToLayerPayload.Layer == nil { + break + } + + return e.complexity.AttachTagToLayerPayload.Layer(childComplexity), true + + case "Camera.altitude": + if e.complexity.Camera.Altitude == nil { + break + } + + return e.complexity.Camera.Altitude(childComplexity), true + + case "Camera.fov": + if e.complexity.Camera.Fov == nil { + break + } + + return e.complexity.Camera.Fov(childComplexity), true 
+ + case "Camera.heading": + if e.complexity.Camera.Heading == nil { + break + } + + return e.complexity.Camera.Heading(childComplexity), true + + case "Camera.lat": + if e.complexity.Camera.Lat == nil { + break + } + + return e.complexity.Camera.Lat(childComplexity), true + + case "Camera.lng": + if e.complexity.Camera.Lng == nil { + break + } + + return e.complexity.Camera.Lng(childComplexity), true + + case "Camera.pitch": + if e.complexity.Camera.Pitch == nil { + break + } + + return e.complexity.Camera.Pitch(childComplexity), true + + case "Camera.roll": + if e.complexity.Camera.Roll == nil { + break + } + + return e.complexity.Camera.Roll(childComplexity), true + + case "Cluster.id": + if e.complexity.Cluster.ID == nil { + break + } + + return e.complexity.Cluster.ID(childComplexity), true + + case "Cluster.name": + if e.complexity.Cluster.Name == nil { + break + } + + return e.complexity.Cluster.Name(childComplexity), true + + case "Cluster.property": + if e.complexity.Cluster.Property == nil { + break + } + + return e.complexity.Cluster.Property(childComplexity), true + + case "Cluster.propertyId": + if e.complexity.Cluster.PropertyID == nil { + break + } + + return e.complexity.Cluster.PropertyID(childComplexity), true + + case "CreateAssetPayload.asset": + if e.complexity.CreateAssetPayload.Asset == nil { + break + } + + return e.complexity.CreateAssetPayload.Asset(childComplexity), true + + case "CreateInfoboxPayload.layer": + if e.complexity.CreateInfoboxPayload.Layer == nil { + break + } + + return e.complexity.CreateInfoboxPayload.Layer(childComplexity), true + + case "CreateScenePayload.scene": + if e.complexity.CreateScenePayload.Scene == nil { + break + } + + return e.complexity.CreateScenePayload.Scene(childComplexity), true + + case "CreateTagGroupPayload.tag": + if e.complexity.CreateTagGroupPayload.Tag == nil { + break + } + + return e.complexity.CreateTagGroupPayload.Tag(childComplexity), true + + case "CreateTagItemPayload.parent": + if 
e.complexity.CreateTagItemPayload.Parent == nil { + break + } + + return e.complexity.CreateTagItemPayload.Parent(childComplexity), true + + case "CreateTagItemPayload.tag": + if e.complexity.CreateTagItemPayload.Tag == nil { + break + } + + return e.complexity.CreateTagItemPayload.Tag(childComplexity), true + + case "CreateTeamPayload.team": + if e.complexity.CreateTeamPayload.Team == nil { + break + } + + return e.complexity.CreateTeamPayload.Team(childComplexity), true + + case "Dataset.fields": + if e.complexity.Dataset.Fields == nil { + break + } + + return e.complexity.Dataset.Fields(childComplexity), true + + case "Dataset.id": + if e.complexity.Dataset.ID == nil { + break + } + + return e.complexity.Dataset.ID(childComplexity), true + + case "Dataset.name": + if e.complexity.Dataset.Name == nil { + break + } + + return e.complexity.Dataset.Name(childComplexity), true + + case "Dataset.schema": + if e.complexity.Dataset.Schema == nil { + break + } + + return e.complexity.Dataset.Schema(childComplexity), true + + case "Dataset.schemaId": + if e.complexity.Dataset.SchemaID == nil { + break + } + + return e.complexity.Dataset.SchemaID(childComplexity), true + + case "Dataset.source": + if e.complexity.Dataset.Source == nil { + break + } + + return e.complexity.Dataset.Source(childComplexity), true + + case "DatasetConnection.edges": + if e.complexity.DatasetConnection.Edges == nil { + break + } + + return e.complexity.DatasetConnection.Edges(childComplexity), true + + case "DatasetConnection.nodes": + if e.complexity.DatasetConnection.Nodes == nil { + break + } + + return e.complexity.DatasetConnection.Nodes(childComplexity), true + + case "DatasetConnection.pageInfo": + if e.complexity.DatasetConnection.PageInfo == nil { + break + } + + return e.complexity.DatasetConnection.PageInfo(childComplexity), true + + case "DatasetConnection.totalCount": + if e.complexity.DatasetConnection.TotalCount == nil { + break + } + + return 
e.complexity.DatasetConnection.TotalCount(childComplexity), true + + case "DatasetEdge.cursor": + if e.complexity.DatasetEdge.Cursor == nil { + break + } + + return e.complexity.DatasetEdge.Cursor(childComplexity), true + + case "DatasetEdge.node": + if e.complexity.DatasetEdge.Node == nil { + break + } + + return e.complexity.DatasetEdge.Node(childComplexity), true + + case "DatasetField.field": + if e.complexity.DatasetField.Field == nil { + break + } + + return e.complexity.DatasetField.Field(childComplexity), true + + case "DatasetField.fieldId": + if e.complexity.DatasetField.FieldID == nil { + break + } + + return e.complexity.DatasetField.FieldID(childComplexity), true + + case "DatasetField.schema": + if e.complexity.DatasetField.Schema == nil { + break + } + + return e.complexity.DatasetField.Schema(childComplexity), true + + case "DatasetField.schemaId": + if e.complexity.DatasetField.SchemaID == nil { + break + } + + return e.complexity.DatasetField.SchemaID(childComplexity), true + + case "DatasetField.source": + if e.complexity.DatasetField.Source == nil { + break + } + + return e.complexity.DatasetField.Source(childComplexity), true + + case "DatasetField.type": + if e.complexity.DatasetField.Type == nil { + break + } + + return e.complexity.DatasetField.Type(childComplexity), true + + case "DatasetField.value": + if e.complexity.DatasetField.Value == nil { + break + } + + return e.complexity.DatasetField.Value(childComplexity), true + + case "DatasetField.valueRef": + if e.complexity.DatasetField.ValueRef == nil { + break + } + + return e.complexity.DatasetField.ValueRef(childComplexity), true + + case "DatasetSchema.datasets": + if e.complexity.DatasetSchema.Datasets == nil { + break + } + + args, err := ec.field_DatasetSchema_datasets_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.DatasetSchema.Datasets(childComplexity, args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), 
args["before"].(*usecase.Cursor)), true + + case "DatasetSchema.dynamic": + if e.complexity.DatasetSchema.Dynamic == nil { + break + } + + return e.complexity.DatasetSchema.Dynamic(childComplexity), true + + case "DatasetSchema.fields": + if e.complexity.DatasetSchema.Fields == nil { + break + } + + return e.complexity.DatasetSchema.Fields(childComplexity), true + + case "DatasetSchema.id": + if e.complexity.DatasetSchema.ID == nil { + break + } + + return e.complexity.DatasetSchema.ID(childComplexity), true + + case "DatasetSchema.name": + if e.complexity.DatasetSchema.Name == nil { + break + } + + return e.complexity.DatasetSchema.Name(childComplexity), true + + case "DatasetSchema.representativeField": + if e.complexity.DatasetSchema.RepresentativeField == nil { + break + } + + return e.complexity.DatasetSchema.RepresentativeField(childComplexity), true + + case "DatasetSchema.representativeFieldId": + if e.complexity.DatasetSchema.RepresentativeFieldID == nil { + break + } + + return e.complexity.DatasetSchema.RepresentativeFieldID(childComplexity), true + + case "DatasetSchema.scene": + if e.complexity.DatasetSchema.Scene == nil { + break + } + + return e.complexity.DatasetSchema.Scene(childComplexity), true + + case "DatasetSchema.sceneId": + if e.complexity.DatasetSchema.SceneID == nil { + break + } + + return e.complexity.DatasetSchema.SceneID(childComplexity), true + + case "DatasetSchema.source": + if e.complexity.DatasetSchema.Source == nil { + break + } + + return e.complexity.DatasetSchema.Source(childComplexity), true + + case "DatasetSchema.totalCount": + if e.complexity.DatasetSchema.TotalCount == nil { + break + } + + return e.complexity.DatasetSchema.TotalCount(childComplexity), true + + case "DatasetSchemaConnection.edges": + if e.complexity.DatasetSchemaConnection.Edges == nil { + break + } + + return e.complexity.DatasetSchemaConnection.Edges(childComplexity), true + + case "DatasetSchemaConnection.nodes": + if 
e.complexity.DatasetSchemaConnection.Nodes == nil { + break + } + + return e.complexity.DatasetSchemaConnection.Nodes(childComplexity), true + + case "DatasetSchemaConnection.pageInfo": + if e.complexity.DatasetSchemaConnection.PageInfo == nil { + break + } + + return e.complexity.DatasetSchemaConnection.PageInfo(childComplexity), true + + case "DatasetSchemaConnection.totalCount": + if e.complexity.DatasetSchemaConnection.TotalCount == nil { + break + } + + return e.complexity.DatasetSchemaConnection.TotalCount(childComplexity), true + + case "DatasetSchemaEdge.cursor": + if e.complexity.DatasetSchemaEdge.Cursor == nil { + break + } + + return e.complexity.DatasetSchemaEdge.Cursor(childComplexity), true + + case "DatasetSchemaEdge.node": + if e.complexity.DatasetSchemaEdge.Node == nil { + break + } + + return e.complexity.DatasetSchemaEdge.Node(childComplexity), true + + case "DatasetSchemaField.id": + if e.complexity.DatasetSchemaField.ID == nil { + break + } + + return e.complexity.DatasetSchemaField.ID(childComplexity), true + + case "DatasetSchemaField.name": + if e.complexity.DatasetSchemaField.Name == nil { + break + } + + return e.complexity.DatasetSchemaField.Name(childComplexity), true + + case "DatasetSchemaField.ref": + if e.complexity.DatasetSchemaField.Ref == nil { + break + } + + return e.complexity.DatasetSchemaField.Ref(childComplexity), true + + case "DatasetSchemaField.refId": + if e.complexity.DatasetSchemaField.RefID == nil { + break + } + + return e.complexity.DatasetSchemaField.RefID(childComplexity), true + + case "DatasetSchemaField.schema": + if e.complexity.DatasetSchemaField.Schema == nil { + break + } + + return e.complexity.DatasetSchemaField.Schema(childComplexity), true + + case "DatasetSchemaField.schemaId": + if e.complexity.DatasetSchemaField.SchemaID == nil { + break + } + + return e.complexity.DatasetSchemaField.SchemaID(childComplexity), true + + case "DatasetSchemaField.source": + if e.complexity.DatasetSchemaField.Source == 
nil { + break + } + + return e.complexity.DatasetSchemaField.Source(childComplexity), true + + case "DatasetSchemaField.type": + if e.complexity.DatasetSchemaField.Type == nil { + break + } + + return e.complexity.DatasetSchemaField.Type(childComplexity), true + + case "DeleteMePayload.userId": + if e.complexity.DeleteMePayload.UserID == nil { + break + } + + return e.complexity.DeleteMePayload.UserID(childComplexity), true + + case "DeleteProjectPayload.projectId": + if e.complexity.DeleteProjectPayload.ProjectID == nil { + break + } + + return e.complexity.DeleteProjectPayload.ProjectID(childComplexity), true + + case "DeleteTeamPayload.teamId": + if e.complexity.DeleteTeamPayload.TeamID == nil { + break + } + + return e.complexity.DeleteTeamPayload.TeamID(childComplexity), true + + case "DetachTagFromLayerPayload.layer": + if e.complexity.DetachTagFromLayerPayload.Layer == nil { + break + } + + return e.complexity.DetachTagFromLayerPayload.Layer(childComplexity), true + + case "DetachTagItemFromGroupPayload.tag": + if e.complexity.DetachTagItemFromGroupPayload.Tag == nil { + break + } + + return e.complexity.DetachTagItemFromGroupPayload.Tag(childComplexity), true + + case "ImportDatasetPayload.datasetSchema": + if e.complexity.ImportDatasetPayload.DatasetSchema == nil { + break + } + + return e.complexity.ImportDatasetPayload.DatasetSchema(childComplexity), true + + case "ImportLayerPayload.layers": + if e.complexity.ImportLayerPayload.Layers == nil { + break + } + + return e.complexity.ImportLayerPayload.Layers(childComplexity), true + + case "ImportLayerPayload.parentLayer": + if e.complexity.ImportLayerPayload.ParentLayer == nil { + break + } + + return e.complexity.ImportLayerPayload.ParentLayer(childComplexity), true + + case "Infobox.fields": + if e.complexity.Infobox.Fields == nil { + break + } + + return e.complexity.Infobox.Fields(childComplexity), true + + case "Infobox.layer": + if e.complexity.Infobox.Layer == nil { + break + } + + return 
e.complexity.Infobox.Layer(childComplexity), true + + case "Infobox.layerId": + if e.complexity.Infobox.LayerID == nil { + break + } + + return e.complexity.Infobox.LayerID(childComplexity), true + + case "Infobox.linkedDataset": + if e.complexity.Infobox.LinkedDataset == nil { + break + } + + return e.complexity.Infobox.LinkedDataset(childComplexity), true + + case "Infobox.linkedDatasetId": + if e.complexity.Infobox.LinkedDatasetID == nil { + break + } + + return e.complexity.Infobox.LinkedDatasetID(childComplexity), true + + case "Infobox.merged": + if e.complexity.Infobox.Merged == nil { + break + } + + return e.complexity.Infobox.Merged(childComplexity), true + + case "Infobox.property": + if e.complexity.Infobox.Property == nil { + break + } + + return e.complexity.Infobox.Property(childComplexity), true + + case "Infobox.propertyId": + if e.complexity.Infobox.PropertyID == nil { + break + } + + return e.complexity.Infobox.PropertyID(childComplexity), true + + case "Infobox.scene": + if e.complexity.Infobox.Scene == nil { + break + } + + return e.complexity.Infobox.Scene(childComplexity), true + + case "Infobox.sceneId": + if e.complexity.Infobox.SceneID == nil { + break + } + + return e.complexity.Infobox.SceneID(childComplexity), true + + case "InfoboxField.extension": + if e.complexity.InfoboxField.Extension == nil { + break + } + + return e.complexity.InfoboxField.Extension(childComplexity), true + + case "InfoboxField.extensionId": + if e.complexity.InfoboxField.ExtensionID == nil { + break + } + + return e.complexity.InfoboxField.ExtensionID(childComplexity), true + + case "InfoboxField.id": + if e.complexity.InfoboxField.ID == nil { + break + } + + return e.complexity.InfoboxField.ID(childComplexity), true + + case "InfoboxField.infobox": + if e.complexity.InfoboxField.Infobox == nil { + break + } + + return e.complexity.InfoboxField.Infobox(childComplexity), true + + case "InfoboxField.layer": + if e.complexity.InfoboxField.Layer == nil { + break + } 
+ + return e.complexity.InfoboxField.Layer(childComplexity), true + + case "InfoboxField.layerId": + if e.complexity.InfoboxField.LayerID == nil { + break + } + + return e.complexity.InfoboxField.LayerID(childComplexity), true + + case "InfoboxField.linkedDataset": + if e.complexity.InfoboxField.LinkedDataset == nil { + break + } + + return e.complexity.InfoboxField.LinkedDataset(childComplexity), true + + case "InfoboxField.linkedDatasetId": + if e.complexity.InfoboxField.LinkedDatasetID == nil { + break + } + + return e.complexity.InfoboxField.LinkedDatasetID(childComplexity), true + + case "InfoboxField.merged": + if e.complexity.InfoboxField.Merged == nil { + break + } + + return e.complexity.InfoboxField.Merged(childComplexity), true + + case "InfoboxField.plugin": + if e.complexity.InfoboxField.Plugin == nil { + break + } + + return e.complexity.InfoboxField.Plugin(childComplexity), true + + case "InfoboxField.pluginId": + if e.complexity.InfoboxField.PluginID == nil { + break + } + + return e.complexity.InfoboxField.PluginID(childComplexity), true + + case "InfoboxField.property": + if e.complexity.InfoboxField.Property == nil { + break + } + + return e.complexity.InfoboxField.Property(childComplexity), true + + case "InfoboxField.propertyId": + if e.complexity.InfoboxField.PropertyID == nil { + break + } + + return e.complexity.InfoboxField.PropertyID(childComplexity), true + + case "InfoboxField.scene": + if e.complexity.InfoboxField.Scene == nil { + break + } + + return e.complexity.InfoboxField.Scene(childComplexity), true + + case "InfoboxField.sceneId": + if e.complexity.InfoboxField.SceneID == nil { + break + } + + return e.complexity.InfoboxField.SceneID(childComplexity), true + + case "InfoboxField.scenePlugin": + if e.complexity.InfoboxField.ScenePlugin == nil { + break + } + + return e.complexity.InfoboxField.ScenePlugin(childComplexity), true + + case "InstallPluginPayload.scene": + if e.complexity.InstallPluginPayload.Scene == nil { + break + } 
+ + return e.complexity.InstallPluginPayload.Scene(childComplexity), true + + case "InstallPluginPayload.scenePlugin": + if e.complexity.InstallPluginPayload.ScenePlugin == nil { + break + } + + return e.complexity.InstallPluginPayload.ScenePlugin(childComplexity), true + + case "LatLng.lat": + if e.complexity.LatLng.Lat == nil { + break + } + + return e.complexity.LatLng.Lat(childComplexity), true + + case "LatLng.lng": + if e.complexity.LatLng.Lng == nil { + break + } + + return e.complexity.LatLng.Lng(childComplexity), true + + case "LatLngHeight.height": + if e.complexity.LatLngHeight.Height == nil { + break + } + + return e.complexity.LatLngHeight.Height(childComplexity), true + + case "LatLngHeight.lat": + if e.complexity.LatLngHeight.Lat == nil { + break + } + + return e.complexity.LatLngHeight.Lat(childComplexity), true + + case "LatLngHeight.lng": + if e.complexity.LatLngHeight.Lng == nil { + break + } + + return e.complexity.LatLngHeight.Lng(childComplexity), true + + case "LayerGroup.extension": + if e.complexity.LayerGroup.Extension == nil { + break + } + + return e.complexity.LayerGroup.Extension(childComplexity), true + + case "LayerGroup.extensionId": + if e.complexity.LayerGroup.ExtensionID == nil { + break + } + + return e.complexity.LayerGroup.ExtensionID(childComplexity), true + + case "LayerGroup.id": + if e.complexity.LayerGroup.ID == nil { + break + } + + return e.complexity.LayerGroup.ID(childComplexity), true + + case "LayerGroup.infobox": + if e.complexity.LayerGroup.Infobox == nil { + break + } + + return e.complexity.LayerGroup.Infobox(childComplexity), true + + case "LayerGroup.isVisible": + if e.complexity.LayerGroup.IsVisible == nil { + break + } + + return e.complexity.LayerGroup.IsVisible(childComplexity), true + + case "LayerGroup.layerIds": + if e.complexity.LayerGroup.LayerIds == nil { + break + } + + return e.complexity.LayerGroup.LayerIds(childComplexity), true + + case "LayerGroup.layers": + if e.complexity.LayerGroup.Layers == 
nil { + break + } + + return e.complexity.LayerGroup.Layers(childComplexity), true + + case "LayerGroup.linkedDatasetSchema": + if e.complexity.LayerGroup.LinkedDatasetSchema == nil { + break + } + + return e.complexity.LayerGroup.LinkedDatasetSchema(childComplexity), true + + case "LayerGroup.linkedDatasetSchemaId": + if e.complexity.LayerGroup.LinkedDatasetSchemaID == nil { + break + } + + return e.complexity.LayerGroup.LinkedDatasetSchemaID(childComplexity), true + + case "LayerGroup.name": + if e.complexity.LayerGroup.Name == nil { + break + } + + return e.complexity.LayerGroup.Name(childComplexity), true + + case "LayerGroup.parent": + if e.complexity.LayerGroup.Parent == nil { + break + } + + return e.complexity.LayerGroup.Parent(childComplexity), true + + case "LayerGroup.parentId": + if e.complexity.LayerGroup.ParentID == nil { + break + } + + return e.complexity.LayerGroup.ParentID(childComplexity), true + + case "LayerGroup.plugin": + if e.complexity.LayerGroup.Plugin == nil { + break + } + + return e.complexity.LayerGroup.Plugin(childComplexity), true + + case "LayerGroup.pluginId": + if e.complexity.LayerGroup.PluginID == nil { + break + } + + return e.complexity.LayerGroup.PluginID(childComplexity), true + + case "LayerGroup.property": + if e.complexity.LayerGroup.Property == nil { + break + } + + return e.complexity.LayerGroup.Property(childComplexity), true + + case "LayerGroup.propertyId": + if e.complexity.LayerGroup.PropertyID == nil { + break + } + + return e.complexity.LayerGroup.PropertyID(childComplexity), true + + case "LayerGroup.root": + if e.complexity.LayerGroup.Root == nil { + break + } + + return e.complexity.LayerGroup.Root(childComplexity), true + + case "LayerGroup.scene": + if e.complexity.LayerGroup.Scene == nil { + break + } + + return e.complexity.LayerGroup.Scene(childComplexity), true + + case "LayerGroup.sceneId": + if e.complexity.LayerGroup.SceneID == nil { + break + } + + return 
e.complexity.LayerGroup.SceneID(childComplexity), true + + case "LayerGroup.scenePlugin": + if e.complexity.LayerGroup.ScenePlugin == nil { + break + } + + return e.complexity.LayerGroup.ScenePlugin(childComplexity), true + + case "LayerGroup.tags": + if e.complexity.LayerGroup.Tags == nil { + break + } + + return e.complexity.LayerGroup.Tags(childComplexity), true + + case "LayerItem.extension": + if e.complexity.LayerItem.Extension == nil { + break + } + + return e.complexity.LayerItem.Extension(childComplexity), true + + case "LayerItem.extensionId": + if e.complexity.LayerItem.ExtensionID == nil { + break + } + + return e.complexity.LayerItem.ExtensionID(childComplexity), true + + case "LayerItem.id": + if e.complexity.LayerItem.ID == nil { + break + } + + return e.complexity.LayerItem.ID(childComplexity), true + + case "LayerItem.infobox": + if e.complexity.LayerItem.Infobox == nil { + break + } + + return e.complexity.LayerItem.Infobox(childComplexity), true + + case "LayerItem.isVisible": + if e.complexity.LayerItem.IsVisible == nil { + break + } + + return e.complexity.LayerItem.IsVisible(childComplexity), true + + case "LayerItem.linkedDataset": + if e.complexity.LayerItem.LinkedDataset == nil { + break + } + + return e.complexity.LayerItem.LinkedDataset(childComplexity), true + + case "LayerItem.linkedDatasetId": + if e.complexity.LayerItem.LinkedDatasetID == nil { + break + } + + return e.complexity.LayerItem.LinkedDatasetID(childComplexity), true + + case "LayerItem.merged": + if e.complexity.LayerItem.Merged == nil { + break + } + + return e.complexity.LayerItem.Merged(childComplexity), true + + case "LayerItem.name": + if e.complexity.LayerItem.Name == nil { + break + } + + return e.complexity.LayerItem.Name(childComplexity), true + + case "LayerItem.parent": + if e.complexity.LayerItem.Parent == nil { + break + } + + return e.complexity.LayerItem.Parent(childComplexity), true + + case "LayerItem.parentId": + if e.complexity.LayerItem.ParentID == nil 
{ + break + } + + return e.complexity.LayerItem.ParentID(childComplexity), true + + case "LayerItem.plugin": + if e.complexity.LayerItem.Plugin == nil { + break + } + + return e.complexity.LayerItem.Plugin(childComplexity), true + + case "LayerItem.pluginId": + if e.complexity.LayerItem.PluginID == nil { + break + } + + return e.complexity.LayerItem.PluginID(childComplexity), true + + case "LayerItem.property": + if e.complexity.LayerItem.Property == nil { + break + } + + return e.complexity.LayerItem.Property(childComplexity), true + + case "LayerItem.propertyId": + if e.complexity.LayerItem.PropertyID == nil { + break + } + + return e.complexity.LayerItem.PropertyID(childComplexity), true + + case "LayerItem.scene": + if e.complexity.LayerItem.Scene == nil { + break + } + + return e.complexity.LayerItem.Scene(childComplexity), true + + case "LayerItem.sceneId": + if e.complexity.LayerItem.SceneID == nil { + break + } + + return e.complexity.LayerItem.SceneID(childComplexity), true + + case "LayerItem.scenePlugin": + if e.complexity.LayerItem.ScenePlugin == nil { + break + } + + return e.complexity.LayerItem.ScenePlugin(childComplexity), true + + case "LayerItem.tags": + if e.complexity.LayerItem.Tags == nil { + break + } + + return e.complexity.LayerItem.Tags(childComplexity), true + + case "LayerTagGroup.children": + if e.complexity.LayerTagGroup.Children == nil { + break + } + + return e.complexity.LayerTagGroup.Children(childComplexity), true + + case "LayerTagGroup.tag": + if e.complexity.LayerTagGroup.Tag == nil { + break + } + + return e.complexity.LayerTagGroup.Tag(childComplexity), true + + case "LayerTagGroup.tagId": + if e.complexity.LayerTagGroup.TagID == nil { + break + } + + return e.complexity.LayerTagGroup.TagID(childComplexity), true + + case "LayerTagItem.tag": + if e.complexity.LayerTagItem.Tag == nil { + break + } + + return e.complexity.LayerTagItem.Tag(childComplexity), true + + case "LayerTagItem.tagId": + if e.complexity.LayerTagItem.TagID 
== nil { + break + } + + return e.complexity.LayerTagItem.TagID(childComplexity), true + + case "Me.auths": + if e.complexity.Me.Auths == nil { + break + } + + return e.complexity.Me.Auths(childComplexity), true + + case "Me.email": + if e.complexity.Me.Email == nil { + break + } + + return e.complexity.Me.Email(childComplexity), true + + case "Me.id": + if e.complexity.Me.ID == nil { + break + } + + return e.complexity.Me.ID(childComplexity), true + + case "Me.lang": + if e.complexity.Me.Lang == nil { + break + } + + return e.complexity.Me.Lang(childComplexity), true + + case "Me.myTeam": + if e.complexity.Me.MyTeam == nil { + break + } + + return e.complexity.Me.MyTeam(childComplexity), true + + case "Me.myTeamId": + if e.complexity.Me.MyTeamID == nil { + break + } + + return e.complexity.Me.MyTeamID(childComplexity), true + + case "Me.name": + if e.complexity.Me.Name == nil { + break + } + + return e.complexity.Me.Name(childComplexity), true + + case "Me.teams": + if e.complexity.Me.Teams == nil { + break + } + + return e.complexity.Me.Teams(childComplexity), true + + case "Me.theme": + if e.complexity.Me.Theme == nil { + break + } + + return e.complexity.Me.Theme(childComplexity), true + + case "MergedInfobox.fields": + if e.complexity.MergedInfobox.Fields == nil { + break + } + + return e.complexity.MergedInfobox.Fields(childComplexity), true + + case "MergedInfobox.property": + if e.complexity.MergedInfobox.Property == nil { + break + } + + return e.complexity.MergedInfobox.Property(childComplexity), true + + case "MergedInfobox.scene": + if e.complexity.MergedInfobox.Scene == nil { + break + } + + return e.complexity.MergedInfobox.Scene(childComplexity), true + + case "MergedInfobox.sceneID": + if e.complexity.MergedInfobox.SceneID == nil { + break + } + + return e.complexity.MergedInfobox.SceneID(childComplexity), true + + case "MergedInfoboxField.extension": + if e.complexity.MergedInfoboxField.Extension == nil { + break + } + + return 
e.complexity.MergedInfoboxField.Extension(childComplexity), true + + case "MergedInfoboxField.extensionId": + if e.complexity.MergedInfoboxField.ExtensionID == nil { + break + } + + return e.complexity.MergedInfoboxField.ExtensionID(childComplexity), true + + case "MergedInfoboxField.originalId": + if e.complexity.MergedInfoboxField.OriginalID == nil { + break + } + + return e.complexity.MergedInfoboxField.OriginalID(childComplexity), true + + case "MergedInfoboxField.plugin": + if e.complexity.MergedInfoboxField.Plugin == nil { + break + } + + return e.complexity.MergedInfoboxField.Plugin(childComplexity), true + + case "MergedInfoboxField.pluginId": + if e.complexity.MergedInfoboxField.PluginID == nil { + break + } + + return e.complexity.MergedInfoboxField.PluginID(childComplexity), true + + case "MergedInfoboxField.property": + if e.complexity.MergedInfoboxField.Property == nil { + break + } + + return e.complexity.MergedInfoboxField.Property(childComplexity), true + + case "MergedInfoboxField.scene": + if e.complexity.MergedInfoboxField.Scene == nil { + break + } + + return e.complexity.MergedInfoboxField.Scene(childComplexity), true + + case "MergedInfoboxField.sceneID": + if e.complexity.MergedInfoboxField.SceneID == nil { + break + } + + return e.complexity.MergedInfoboxField.SceneID(childComplexity), true + + case "MergedInfoboxField.scenePlugin": + if e.complexity.MergedInfoboxField.ScenePlugin == nil { + break + } + + return e.complexity.MergedInfoboxField.ScenePlugin(childComplexity), true + + case "MergedLayer.infobox": + if e.complexity.MergedLayer.Infobox == nil { + break + } + + return e.complexity.MergedLayer.Infobox(childComplexity), true + + case "MergedLayer.original": + if e.complexity.MergedLayer.Original == nil { + break + } + + return e.complexity.MergedLayer.Original(childComplexity), true + + case "MergedLayer.originalId": + if e.complexity.MergedLayer.OriginalID == nil { + break + } + + return 
e.complexity.MergedLayer.OriginalID(childComplexity), true + + case "MergedLayer.parent": + if e.complexity.MergedLayer.Parent == nil { + break + } + + return e.complexity.MergedLayer.Parent(childComplexity), true + + case "MergedLayer.parentId": + if e.complexity.MergedLayer.ParentID == nil { + break + } + + return e.complexity.MergedLayer.ParentID(childComplexity), true + + case "MergedLayer.property": + if e.complexity.MergedLayer.Property == nil { + break + } + + return e.complexity.MergedLayer.Property(childComplexity), true + + case "MergedLayer.scene": + if e.complexity.MergedLayer.Scene == nil { + break + } + + return e.complexity.MergedLayer.Scene(childComplexity), true + + case "MergedLayer.sceneID": + if e.complexity.MergedLayer.SceneID == nil { + break + } + + return e.complexity.MergedLayer.SceneID(childComplexity), true + + case "MergedProperty.groups": + if e.complexity.MergedProperty.Groups == nil { + break + } + + return e.complexity.MergedProperty.Groups(childComplexity), true + + case "MergedProperty.linkedDataset": + if e.complexity.MergedProperty.LinkedDataset == nil { + break + } + + return e.complexity.MergedProperty.LinkedDataset(childComplexity), true + + case "MergedProperty.linkedDatasetId": + if e.complexity.MergedProperty.LinkedDatasetID == nil { + break + } + + return e.complexity.MergedProperty.LinkedDatasetID(childComplexity), true + + case "MergedProperty.original": + if e.complexity.MergedProperty.Original == nil { + break + } + + return e.complexity.MergedProperty.Original(childComplexity), true + + case "MergedProperty.originalId": + if e.complexity.MergedProperty.OriginalID == nil { + break + } + + return e.complexity.MergedProperty.OriginalID(childComplexity), true + + case "MergedProperty.parent": + if e.complexity.MergedProperty.Parent == nil { + break + } + + return e.complexity.MergedProperty.Parent(childComplexity), true + + case "MergedProperty.parentId": + if e.complexity.MergedProperty.ParentID == nil { + break + } + + 
return e.complexity.MergedProperty.ParentID(childComplexity), true + + case "MergedProperty.schema": + if e.complexity.MergedProperty.Schema == nil { + break + } + + return e.complexity.MergedProperty.Schema(childComplexity), true + + case "MergedProperty.schemaId": + if e.complexity.MergedProperty.SchemaID == nil { + break + } + + return e.complexity.MergedProperty.SchemaID(childComplexity), true + + case "MergedPropertyField.actualValue": + if e.complexity.MergedPropertyField.ActualValue == nil { + break + } + + return e.complexity.MergedPropertyField.ActualValue(childComplexity), true + + case "MergedPropertyField.field": + if e.complexity.MergedPropertyField.Field == nil { + break + } + + return e.complexity.MergedPropertyField.Field(childComplexity), true + + case "MergedPropertyField.fieldId": + if e.complexity.MergedPropertyField.FieldID == nil { + break + } + + return e.complexity.MergedPropertyField.FieldID(childComplexity), true + + case "MergedPropertyField.links": + if e.complexity.MergedPropertyField.Links == nil { + break + } + + return e.complexity.MergedPropertyField.Links(childComplexity), true + + case "MergedPropertyField.overridden": + if e.complexity.MergedPropertyField.Overridden == nil { + break + } + + return e.complexity.MergedPropertyField.Overridden(childComplexity), true + + case "MergedPropertyField.schema": + if e.complexity.MergedPropertyField.Schema == nil { + break + } + + return e.complexity.MergedPropertyField.Schema(childComplexity), true + + case "MergedPropertyField.schemaId": + if e.complexity.MergedPropertyField.SchemaID == nil { + break + } + + return e.complexity.MergedPropertyField.SchemaID(childComplexity), true + + case "MergedPropertyField.type": + if e.complexity.MergedPropertyField.Type == nil { + break + } + + return e.complexity.MergedPropertyField.Type(childComplexity), true + + case "MergedPropertyField.value": + if e.complexity.MergedPropertyField.Value == nil { + break + } + + return 
e.complexity.MergedPropertyField.Value(childComplexity), true + + case "MergedPropertyGroup.fields": + if e.complexity.MergedPropertyGroup.Fields == nil { + break + } + + return e.complexity.MergedPropertyGroup.Fields(childComplexity), true + + case "MergedPropertyGroup.groups": + if e.complexity.MergedPropertyGroup.Groups == nil { + break + } + + return e.complexity.MergedPropertyGroup.Groups(childComplexity), true + + case "MergedPropertyGroup.linkedDataset": + if e.complexity.MergedPropertyGroup.LinkedDataset == nil { + break + } + + return e.complexity.MergedPropertyGroup.LinkedDataset(childComplexity), true + + case "MergedPropertyGroup.linkedDatasetId": + if e.complexity.MergedPropertyGroup.LinkedDatasetID == nil { + break + } + + return e.complexity.MergedPropertyGroup.LinkedDatasetID(childComplexity), true + + case "MergedPropertyGroup.original": + if e.complexity.MergedPropertyGroup.Original == nil { + break + } + + return e.complexity.MergedPropertyGroup.Original(childComplexity), true + + case "MergedPropertyGroup.originalId": + if e.complexity.MergedPropertyGroup.OriginalID == nil { + break + } + + return e.complexity.MergedPropertyGroup.OriginalID(childComplexity), true + + case "MergedPropertyGroup.originalProperty": + if e.complexity.MergedPropertyGroup.OriginalProperty == nil { + break + } + + return e.complexity.MergedPropertyGroup.OriginalProperty(childComplexity), true + + case "MergedPropertyGroup.originalPropertyId": + if e.complexity.MergedPropertyGroup.OriginalPropertyID == nil { + break + } + + return e.complexity.MergedPropertyGroup.OriginalPropertyID(childComplexity), true + + case "MergedPropertyGroup.parent": + if e.complexity.MergedPropertyGroup.Parent == nil { + break + } + + return e.complexity.MergedPropertyGroup.Parent(childComplexity), true + + case "MergedPropertyGroup.parentId": + if e.complexity.MergedPropertyGroup.ParentID == nil { + break + } + + return e.complexity.MergedPropertyGroup.ParentID(childComplexity), true + + case 
"MergedPropertyGroup.parentProperty": + if e.complexity.MergedPropertyGroup.ParentProperty == nil { + break + } + + return e.complexity.MergedPropertyGroup.ParentProperty(childComplexity), true + + case "MergedPropertyGroup.parentPropertyId": + if e.complexity.MergedPropertyGroup.ParentPropertyID == nil { + break + } + + return e.complexity.MergedPropertyGroup.ParentPropertyID(childComplexity), true + + case "MergedPropertyGroup.schema": + if e.complexity.MergedPropertyGroup.Schema == nil { + break + } + + return e.complexity.MergedPropertyGroup.Schema(childComplexity), true + + case "MergedPropertyGroup.schemaGroupId": + if e.complexity.MergedPropertyGroup.SchemaGroupID == nil { + break + } + + return e.complexity.MergedPropertyGroup.SchemaGroupID(childComplexity), true + + case "MergedPropertyGroup.schemaId": + if e.complexity.MergedPropertyGroup.SchemaID == nil { + break + } + + return e.complexity.MergedPropertyGroup.SchemaID(childComplexity), true + + case "MoveInfoboxFieldPayload.index": + if e.complexity.MoveInfoboxFieldPayload.Index == nil { + break + } + + return e.complexity.MoveInfoboxFieldPayload.Index(childComplexity), true + + case "MoveInfoboxFieldPayload.infoboxFieldId": + if e.complexity.MoveInfoboxFieldPayload.InfoboxFieldID == nil { + break + } + + return e.complexity.MoveInfoboxFieldPayload.InfoboxFieldID(childComplexity), true + + case "MoveInfoboxFieldPayload.layer": + if e.complexity.MoveInfoboxFieldPayload.Layer == nil { + break + } + + return e.complexity.MoveInfoboxFieldPayload.Layer(childComplexity), true + + case "MoveLayerPayload.fromParentLayer": + if e.complexity.MoveLayerPayload.FromParentLayer == nil { + break + } + + return e.complexity.MoveLayerPayload.FromParentLayer(childComplexity), true + + case "MoveLayerPayload.index": + if e.complexity.MoveLayerPayload.Index == nil { + break + } + + return e.complexity.MoveLayerPayload.Index(childComplexity), true + + case "MoveLayerPayload.layerId": + if 
e.complexity.MoveLayerPayload.LayerID == nil { + break + } + + return e.complexity.MoveLayerPayload.LayerID(childComplexity), true + + case "MoveLayerPayload.toParentLayer": + if e.complexity.MoveLayerPayload.ToParentLayer == nil { + break + } + + return e.complexity.MoveLayerPayload.ToParentLayer(childComplexity), true + + case "Mutation.addCluster": + if e.complexity.Mutation.AddCluster == nil { + break + } + + args, err := ec.field_Mutation_addCluster_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddCluster(childComplexity, args["input"].(gqlmodel.AddClusterInput)), true + + case "Mutation.addDatasetSchema": + if e.complexity.Mutation.AddDatasetSchema == nil { + break + } + + args, err := ec.field_Mutation_addDatasetSchema_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddDatasetSchema(childComplexity, args["input"].(gqlmodel.AddDatasetSchemaInput)), true + + case "Mutation.addDynamicDataset": + if e.complexity.Mutation.AddDynamicDataset == nil { + break + } + + args, err := ec.field_Mutation_addDynamicDataset_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddDynamicDataset(childComplexity, args["input"].(gqlmodel.AddDynamicDatasetInput)), true + + case "Mutation.addDynamicDatasetSchema": + if e.complexity.Mutation.AddDynamicDatasetSchema == nil { + break + } + + args, err := ec.field_Mutation_addDynamicDatasetSchema_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddDynamicDatasetSchema(childComplexity, args["input"].(gqlmodel.AddDynamicDatasetSchemaInput)), true + + case "Mutation.addInfoboxField": + if e.complexity.Mutation.AddInfoboxField == nil { + break + } + + args, err := ec.field_Mutation_addInfoboxField_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return 
e.complexity.Mutation.AddInfoboxField(childComplexity, args["input"].(gqlmodel.AddInfoboxFieldInput)), true + + case "Mutation.addLayerGroup": + if e.complexity.Mutation.AddLayerGroup == nil { + break + } + + args, err := ec.field_Mutation_addLayerGroup_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddLayerGroup(childComplexity, args["input"].(gqlmodel.AddLayerGroupInput)), true + + case "Mutation.addLayerItem": + if e.complexity.Mutation.AddLayerItem == nil { + break + } + + args, err := ec.field_Mutation_addLayerItem_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddLayerItem(childComplexity, args["input"].(gqlmodel.AddLayerItemInput)), true + + case "Mutation.addMemberToTeam": + if e.complexity.Mutation.AddMemberToTeam == nil { + break + } + + args, err := ec.field_Mutation_addMemberToTeam_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddMemberToTeam(childComplexity, args["input"].(gqlmodel.AddMemberToTeamInput)), true + + case "Mutation.addPropertyItem": + if e.complexity.Mutation.AddPropertyItem == nil { + break + } + + args, err := ec.field_Mutation_addPropertyItem_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddPropertyItem(childComplexity, args["input"].(gqlmodel.AddPropertyItemInput)), true + + case "Mutation.addWidget": + if e.complexity.Mutation.AddWidget == nil { + break + } + + args, err := ec.field_Mutation_addWidget_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddWidget(childComplexity, args["input"].(gqlmodel.AddWidgetInput)), true + + case "Mutation.attachTagItemToGroup": + if e.complexity.Mutation.AttachTagItemToGroup == nil { + break + } + + args, err := ec.field_Mutation_attachTagItemToGroup_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + 
return e.complexity.Mutation.AttachTagItemToGroup(childComplexity, args["input"].(gqlmodel.AttachTagItemToGroupInput)), true + + case "Mutation.attachTagToLayer": + if e.complexity.Mutation.AttachTagToLayer == nil { + break + } + + args, err := ec.field_Mutation_attachTagToLayer_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AttachTagToLayer(childComplexity, args["input"].(gqlmodel.AttachTagToLayerInput)), true + + case "Mutation.createAsset": + if e.complexity.Mutation.CreateAsset == nil { + break + } + + args, err := ec.field_Mutation_createAsset_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.CreateAsset(childComplexity, args["input"].(gqlmodel.CreateAssetInput)), true + + case "Mutation.createInfobox": + if e.complexity.Mutation.CreateInfobox == nil { + break + } + + args, err := ec.field_Mutation_createInfobox_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.CreateInfobox(childComplexity, args["input"].(gqlmodel.CreateInfoboxInput)), true + + case "Mutation.createProject": + if e.complexity.Mutation.CreateProject == nil { + break + } + + args, err := ec.field_Mutation_createProject_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.CreateProject(childComplexity, args["input"].(gqlmodel.CreateProjectInput)), true + + case "Mutation.createScene": + if e.complexity.Mutation.CreateScene == nil { + break + } + + args, err := ec.field_Mutation_createScene_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.CreateScene(childComplexity, args["input"].(gqlmodel.CreateSceneInput)), true + + case "Mutation.createTagGroup": + if e.complexity.Mutation.CreateTagGroup == nil { + break + } + + args, err := ec.field_Mutation_createTagGroup_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + 
return e.complexity.Mutation.CreateTagGroup(childComplexity, args["input"].(gqlmodel.CreateTagGroupInput)), true + + case "Mutation.createTagItem": + if e.complexity.Mutation.CreateTagItem == nil { + break + } + + args, err := ec.field_Mutation_createTagItem_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.CreateTagItem(childComplexity, args["input"].(gqlmodel.CreateTagItemInput)), true + + case "Mutation.createTeam": + if e.complexity.Mutation.CreateTeam == nil { + break + } + + args, err := ec.field_Mutation_createTeam_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.CreateTeam(childComplexity, args["input"].(gqlmodel.CreateTeamInput)), true + + case "Mutation.deleteMe": + if e.complexity.Mutation.DeleteMe == nil { + break + } + + args, err := ec.field_Mutation_deleteMe_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.DeleteMe(childComplexity, args["input"].(gqlmodel.DeleteMeInput)), true + + case "Mutation.deleteProject": + if e.complexity.Mutation.DeleteProject == nil { + break + } + + args, err := ec.field_Mutation_deleteProject_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.DeleteProject(childComplexity, args["input"].(gqlmodel.DeleteProjectInput)), true + + case "Mutation.deleteTeam": + if e.complexity.Mutation.DeleteTeam == nil { + break + } + + args, err := ec.field_Mutation_deleteTeam_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.DeleteTeam(childComplexity, args["input"].(gqlmodel.DeleteTeamInput)), true + + case "Mutation.detachTagFromLayer": + if e.complexity.Mutation.DetachTagFromLayer == nil { + break + } + + args, err := ec.field_Mutation_detachTagFromLayer_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return 
e.complexity.Mutation.DetachTagFromLayer(childComplexity, args["input"].(gqlmodel.DetachTagFromLayerInput)), true + + case "Mutation.detachTagItemFromGroup": + if e.complexity.Mutation.DetachTagItemFromGroup == nil { + break + } + + args, err := ec.field_Mutation_detachTagItemFromGroup_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.DetachTagItemFromGroup(childComplexity, args["input"].(gqlmodel.DetachTagItemFromGroupInput)), true + + case "Mutation.importDataset": + if e.complexity.Mutation.ImportDataset == nil { + break + } + + args, err := ec.field_Mutation_importDataset_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.ImportDataset(childComplexity, args["input"].(gqlmodel.ImportDatasetInput)), true + + case "Mutation.importDatasetFromGoogleSheet": + if e.complexity.Mutation.ImportDatasetFromGoogleSheet == nil { + break + } + + args, err := ec.field_Mutation_importDatasetFromGoogleSheet_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.ImportDatasetFromGoogleSheet(childComplexity, args["input"].(gqlmodel.ImportDatasetFromGoogleSheetInput)), true + + case "Mutation.importLayer": + if e.complexity.Mutation.ImportLayer == nil { + break + } + + args, err := ec.field_Mutation_importLayer_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.ImportLayer(childComplexity, args["input"].(gqlmodel.ImportLayerInput)), true + + case "Mutation.installPlugin": + if e.complexity.Mutation.InstallPlugin == nil { + break + } + + args, err := ec.field_Mutation_installPlugin_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.InstallPlugin(childComplexity, args["input"].(gqlmodel.InstallPluginInput)), true + + case "Mutation.linkDatasetToPropertyValue": + if e.complexity.Mutation.LinkDatasetToPropertyValue == nil { + break + 
} + + args, err := ec.field_Mutation_linkDatasetToPropertyValue_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.LinkDatasetToPropertyValue(childComplexity, args["input"].(gqlmodel.LinkDatasetToPropertyValueInput)), true + + case "Mutation.moveInfoboxField": + if e.complexity.Mutation.MoveInfoboxField == nil { + break + } + + args, err := ec.field_Mutation_moveInfoboxField_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.MoveInfoboxField(childComplexity, args["input"].(gqlmodel.MoveInfoboxFieldInput)), true + + case "Mutation.moveLayer": + if e.complexity.Mutation.MoveLayer == nil { + break + } + + args, err := ec.field_Mutation_moveLayer_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.MoveLayer(childComplexity, args["input"].(gqlmodel.MoveLayerInput)), true + + case "Mutation.movePropertyItem": + if e.complexity.Mutation.MovePropertyItem == nil { + break + } + + args, err := ec.field_Mutation_movePropertyItem_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.MovePropertyItem(childComplexity, args["input"].(gqlmodel.MovePropertyItemInput)), true + + case "Mutation.publishProject": + if e.complexity.Mutation.PublishProject == nil { + break + } + + args, err := ec.field_Mutation_publishProject_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.PublishProject(childComplexity, args["input"].(gqlmodel.PublishProjectInput)), true + + case "Mutation.removeAsset": + if e.complexity.Mutation.RemoveAsset == nil { + break + } + + args, err := ec.field_Mutation_removeAsset_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveAsset(childComplexity, args["input"].(gqlmodel.RemoveAssetInput)), true + + case "Mutation.removeCluster": + if 
e.complexity.Mutation.RemoveCluster == nil { + break + } + + args, err := ec.field_Mutation_removeCluster_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveCluster(childComplexity, args["input"].(gqlmodel.RemoveClusterInput)), true + + case "Mutation.removeDatasetSchema": + if e.complexity.Mutation.RemoveDatasetSchema == nil { + break + } + + args, err := ec.field_Mutation_removeDatasetSchema_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveDatasetSchema(childComplexity, args["input"].(gqlmodel.RemoveDatasetSchemaInput)), true + + case "Mutation.removeInfobox": + if e.complexity.Mutation.RemoveInfobox == nil { + break + } + + args, err := ec.field_Mutation_removeInfobox_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveInfobox(childComplexity, args["input"].(gqlmodel.RemoveInfoboxInput)), true + + case "Mutation.removeInfoboxField": + if e.complexity.Mutation.RemoveInfoboxField == nil { + break + } + + args, err := ec.field_Mutation_removeInfoboxField_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveInfoboxField(childComplexity, args["input"].(gqlmodel.RemoveInfoboxFieldInput)), true + + case "Mutation.removeLayer": + if e.complexity.Mutation.RemoveLayer == nil { + break + } + + args, err := ec.field_Mutation_removeLayer_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveLayer(childComplexity, args["input"].(gqlmodel.RemoveLayerInput)), true + + case "Mutation.removeMemberFromTeam": + if e.complexity.Mutation.RemoveMemberFromTeam == nil { + break + } + + args, err := ec.field_Mutation_removeMemberFromTeam_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveMemberFromTeam(childComplexity, 
args["input"].(gqlmodel.RemoveMemberFromTeamInput)), true + + case "Mutation.removeMyAuth": + if e.complexity.Mutation.RemoveMyAuth == nil { + break + } + + args, err := ec.field_Mutation_removeMyAuth_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveMyAuth(childComplexity, args["input"].(gqlmodel.RemoveMyAuthInput)), true + + case "Mutation.removePropertyField": + if e.complexity.Mutation.RemovePropertyField == nil { + break + } + + args, err := ec.field_Mutation_removePropertyField_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemovePropertyField(childComplexity, args["input"].(gqlmodel.RemovePropertyFieldInput)), true + + case "Mutation.removePropertyItem": + if e.complexity.Mutation.RemovePropertyItem == nil { + break + } + + args, err := ec.field_Mutation_removePropertyItem_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemovePropertyItem(childComplexity, args["input"].(gqlmodel.RemovePropertyItemInput)), true + + case "Mutation.removeTag": + if e.complexity.Mutation.RemoveTag == nil { + break + } + + args, err := ec.field_Mutation_removeTag_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveTag(childComplexity, args["input"].(gqlmodel.RemoveTagInput)), true + + case "Mutation.removeWidget": + if e.complexity.Mutation.RemoveWidget == nil { + break + } + + args, err := ec.field_Mutation_removeWidget_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveWidget(childComplexity, args["input"].(gqlmodel.RemoveWidgetInput)), true + + case "Mutation.signup": + if e.complexity.Mutation.Signup == nil { + break + } + + args, err := ec.field_Mutation_signup_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.Signup(childComplexity, 
args["input"].(gqlmodel.SignupInput)), true + + case "Mutation.syncDataset": + if e.complexity.Mutation.SyncDataset == nil { + break + } + + args, err := ec.field_Mutation_syncDataset_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.SyncDataset(childComplexity, args["input"].(gqlmodel.SyncDatasetInput)), true + + case "Mutation.uninstallPlugin": + if e.complexity.Mutation.UninstallPlugin == nil { + break + } + + args, err := ec.field_Mutation_uninstallPlugin_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UninstallPlugin(childComplexity, args["input"].(gqlmodel.UninstallPluginInput)), true + + case "Mutation.unlinkPropertyValue": + if e.complexity.Mutation.UnlinkPropertyValue == nil { + break + } + + args, err := ec.field_Mutation_unlinkPropertyValue_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UnlinkPropertyValue(childComplexity, args["input"].(gqlmodel.UnlinkPropertyValueInput)), true + + case "Mutation.updateCluster": + if e.complexity.Mutation.UpdateCluster == nil { + break + } + + args, err := ec.field_Mutation_updateCluster_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateCluster(childComplexity, args["input"].(gqlmodel.UpdateClusterInput)), true + + case "Mutation.updateDatasetSchema": + if e.complexity.Mutation.UpdateDatasetSchema == nil { + break + } + + args, err := ec.field_Mutation_updateDatasetSchema_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateDatasetSchema(childComplexity, args["input"].(gqlmodel.UpdateDatasetSchemaInput)), true + + case "Mutation.updateLayer": + if e.complexity.Mutation.UpdateLayer == nil { + break + } + + args, err := ec.field_Mutation_updateLayer_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return 
e.complexity.Mutation.UpdateLayer(childComplexity, args["input"].(gqlmodel.UpdateLayerInput)), true + + case "Mutation.updateMe": + if e.complexity.Mutation.UpdateMe == nil { + break + } + + args, err := ec.field_Mutation_updateMe_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateMe(childComplexity, args["input"].(gqlmodel.UpdateMeInput)), true + + case "Mutation.updateMemberOfTeam": + if e.complexity.Mutation.UpdateMemberOfTeam == nil { + break + } + + args, err := ec.field_Mutation_updateMemberOfTeam_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateMemberOfTeam(childComplexity, args["input"].(gqlmodel.UpdateMemberOfTeamInput)), true + + case "Mutation.updateProject": + if e.complexity.Mutation.UpdateProject == nil { + break + } + + args, err := ec.field_Mutation_updateProject_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateProject(childComplexity, args["input"].(gqlmodel.UpdateProjectInput)), true + + case "Mutation.updatePropertyItems": + if e.complexity.Mutation.UpdatePropertyItems == nil { + break + } + + args, err := ec.field_Mutation_updatePropertyItems_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdatePropertyItems(childComplexity, args["input"].(gqlmodel.UpdatePropertyItemInput)), true + + case "Mutation.updatePropertyValue": + if e.complexity.Mutation.UpdatePropertyValue == nil { + break + } + + args, err := ec.field_Mutation_updatePropertyValue_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdatePropertyValue(childComplexity, args["input"].(gqlmodel.UpdatePropertyValueInput)), true + + case "Mutation.updateTag": + if e.complexity.Mutation.UpdateTag == nil { + break + } + + args, err := ec.field_Mutation_updateTag_args(context.TODO(), rawArgs) + if err != nil { + 
return 0, false + } + + return e.complexity.Mutation.UpdateTag(childComplexity, args["input"].(gqlmodel.UpdateTagInput)), true + + case "Mutation.updateTeam": + if e.complexity.Mutation.UpdateTeam == nil { + break + } + + args, err := ec.field_Mutation_updateTeam_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateTeam(childComplexity, args["input"].(gqlmodel.UpdateTeamInput)), true + + case "Mutation.updateWidget": + if e.complexity.Mutation.UpdateWidget == nil { + break + } + + args, err := ec.field_Mutation_updateWidget_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateWidget(childComplexity, args["input"].(gqlmodel.UpdateWidgetInput)), true + + case "Mutation.updateWidgetAlignSystem": + if e.complexity.Mutation.UpdateWidgetAlignSystem == nil { + break + } + + args, err := ec.field_Mutation_updateWidgetAlignSystem_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateWidgetAlignSystem(childComplexity, args["input"].(gqlmodel.UpdateWidgetAlignSystemInput)), true + + case "Mutation.upgradePlugin": + if e.complexity.Mutation.UpgradePlugin == nil { + break + } + + args, err := ec.field_Mutation_upgradePlugin_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpgradePlugin(childComplexity, args["input"].(gqlmodel.UpgradePluginInput)), true + + case "Mutation.uploadFileToProperty": + if e.complexity.Mutation.UploadFileToProperty == nil { + break + } + + args, err := ec.field_Mutation_uploadFileToProperty_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UploadFileToProperty(childComplexity, args["input"].(gqlmodel.UploadFileToPropertyInput)), true + + case "Mutation.uploadPlugin": + if e.complexity.Mutation.UploadPlugin == nil { + break + } + + args, err := 
ec.field_Mutation_uploadPlugin_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UploadPlugin(childComplexity, args["input"].(gqlmodel.UploadPluginInput)), true + + case "PageInfo.endCursor": + if e.complexity.PageInfo.EndCursor == nil { + break + } + + return e.complexity.PageInfo.EndCursor(childComplexity), true + + case "PageInfo.hasNextPage": + if e.complexity.PageInfo.HasNextPage == nil { + break + } + + return e.complexity.PageInfo.HasNextPage(childComplexity), true + + case "PageInfo.hasPreviousPage": + if e.complexity.PageInfo.HasPreviousPage == nil { + break + } + + return e.complexity.PageInfo.HasPreviousPage(childComplexity), true + + case "PageInfo.startCursor": + if e.complexity.PageInfo.StartCursor == nil { + break + } + + return e.complexity.PageInfo.StartCursor(childComplexity), true + + case "Plugin.allTranslatedDescription": + if e.complexity.Plugin.AllTranslatedDescription == nil { + break + } + + return e.complexity.Plugin.AllTranslatedDescription(childComplexity), true + + case "Plugin.allTranslatedName": + if e.complexity.Plugin.AllTranslatedName == nil { + break + } + + return e.complexity.Plugin.AllTranslatedName(childComplexity), true + + case "Plugin.author": + if e.complexity.Plugin.Author == nil { + break + } + + return e.complexity.Plugin.Author(childComplexity), true + + case "Plugin.description": + if e.complexity.Plugin.Description == nil { + break + } + + return e.complexity.Plugin.Description(childComplexity), true + + case "Plugin.extensions": + if e.complexity.Plugin.Extensions == nil { + break + } + + return e.complexity.Plugin.Extensions(childComplexity), true + + case "Plugin.id": + if e.complexity.Plugin.ID == nil { + break + } + + return e.complexity.Plugin.ID(childComplexity), true + + case "Plugin.name": + if e.complexity.Plugin.Name == nil { + break + } + + return e.complexity.Plugin.Name(childComplexity), true + + case "Plugin.propertySchema": + if 
e.complexity.Plugin.PropertySchema == nil { + break + } + + return e.complexity.Plugin.PropertySchema(childComplexity), true + + case "Plugin.propertySchemaId": + if e.complexity.Plugin.PropertySchemaID == nil { + break + } + + return e.complexity.Plugin.PropertySchemaID(childComplexity), true + + case "Plugin.repositoryUrl": + if e.complexity.Plugin.RepositoryURL == nil { + break + } + + return e.complexity.Plugin.RepositoryURL(childComplexity), true + + case "Plugin.scene": + if e.complexity.Plugin.Scene == nil { + break + } + + return e.complexity.Plugin.Scene(childComplexity), true + + case "Plugin.sceneId": + if e.complexity.Plugin.SceneID == nil { + break + } + + return e.complexity.Plugin.SceneID(childComplexity), true + + case "Plugin.scenePlugin": + if e.complexity.Plugin.ScenePlugin == nil { + break + } + + args, err := ec.field_Plugin_scenePlugin_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Plugin.ScenePlugin(childComplexity, args["sceneId"].(*gqlmodel.ID)), true + + case "Plugin.translatedDescription": + if e.complexity.Plugin.TranslatedDescription == nil { + break + } + + args, err := ec.field_Plugin_translatedDescription_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Plugin.TranslatedDescription(childComplexity, args["lang"].(*language.Tag)), true + + case "Plugin.translatedName": + if e.complexity.Plugin.TranslatedName == nil { + break + } + + args, err := ec.field_Plugin_translatedName_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Plugin.TranslatedName(childComplexity, args["lang"].(*language.Tag)), true + + case "Plugin.version": + if e.complexity.Plugin.Version == nil { + break + } + + return e.complexity.Plugin.Version(childComplexity), true + + case "PluginExtension.allTranslatedDescription": + if e.complexity.PluginExtension.AllTranslatedDescription == nil { + break + } + + return 
e.complexity.PluginExtension.AllTranslatedDescription(childComplexity), true + + case "PluginExtension.allTranslatedName": + if e.complexity.PluginExtension.AllTranslatedName == nil { + break + } + + return e.complexity.PluginExtension.AllTranslatedName(childComplexity), true + + case "PluginExtension.description": + if e.complexity.PluginExtension.Description == nil { + break + } + + return e.complexity.PluginExtension.Description(childComplexity), true + + case "PluginExtension.extensionId": + if e.complexity.PluginExtension.ExtensionID == nil { + break + } + + return e.complexity.PluginExtension.ExtensionID(childComplexity), true + + case "PluginExtension.icon": + if e.complexity.PluginExtension.Icon == nil { + break + } + + return e.complexity.PluginExtension.Icon(childComplexity), true + + case "PluginExtension.name": + if e.complexity.PluginExtension.Name == nil { + break + } + + return e.complexity.PluginExtension.Name(childComplexity), true + + case "PluginExtension.plugin": + if e.complexity.PluginExtension.Plugin == nil { + break + } + + return e.complexity.PluginExtension.Plugin(childComplexity), true + + case "PluginExtension.pluginId": + if e.complexity.PluginExtension.PluginID == nil { + break + } + + return e.complexity.PluginExtension.PluginID(childComplexity), true + + case "PluginExtension.propertySchema": + if e.complexity.PluginExtension.PropertySchema == nil { + break + } + + return e.complexity.PluginExtension.PropertySchema(childComplexity), true + + case "PluginExtension.propertySchemaId": + if e.complexity.PluginExtension.PropertySchemaID == nil { + break + } + + return e.complexity.PluginExtension.PropertySchemaID(childComplexity), true + + case "PluginExtension.sceneWidget": + if e.complexity.PluginExtension.SceneWidget == nil { + break + } + + args, err := ec.field_PluginExtension_sceneWidget_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.PluginExtension.SceneWidget(childComplexity, 
args["sceneId"].(gqlmodel.ID)), true + + case "PluginExtension.singleOnly": + if e.complexity.PluginExtension.SingleOnly == nil { + break + } + + return e.complexity.PluginExtension.SingleOnly(childComplexity), true + + case "PluginExtension.translatedDescription": + if e.complexity.PluginExtension.TranslatedDescription == nil { + break + } + + args, err := ec.field_PluginExtension_translatedDescription_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.PluginExtension.TranslatedDescription(childComplexity, args["lang"].(*language.Tag)), true + + case "PluginExtension.translatedName": + if e.complexity.PluginExtension.TranslatedName == nil { + break + } + + args, err := ec.field_PluginExtension_translatedName_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.PluginExtension.TranslatedName(childComplexity, args["lang"].(*language.Tag)), true + + case "PluginExtension.type": + if e.complexity.PluginExtension.Type == nil { + break + } + + return e.complexity.PluginExtension.Type(childComplexity), true + + case "PluginExtension.visualizer": + if e.complexity.PluginExtension.Visualizer == nil { + break + } + + return e.complexity.PluginExtension.Visualizer(childComplexity), true + + case "PluginExtension.widgetLayout": + if e.complexity.PluginExtension.WidgetLayout == nil { + break + } + + return e.complexity.PluginExtension.WidgetLayout(childComplexity), true + + case "Project.alias": + if e.complexity.Project.Alias == nil { + break + } + + return e.complexity.Project.Alias(childComplexity), true + + case "Project.basicAuthPassword": + if e.complexity.Project.BasicAuthPassword == nil { + break + } + + return e.complexity.Project.BasicAuthPassword(childComplexity), true + + case "Project.basicAuthUsername": + if e.complexity.Project.BasicAuthUsername == nil { + break + } + + return e.complexity.Project.BasicAuthUsername(childComplexity), true + + case "Project.createdAt": + if 
e.complexity.Project.CreatedAt == nil { + break + } + + return e.complexity.Project.CreatedAt(childComplexity), true + + case "Project.description": + if e.complexity.Project.Description == nil { + break + } + + return e.complexity.Project.Description(childComplexity), true + + case "Project.id": + if e.complexity.Project.ID == nil { + break + } + + return e.complexity.Project.ID(childComplexity), true + + case "Project.imageUrl": + if e.complexity.Project.ImageURL == nil { + break + } + + return e.complexity.Project.ImageURL(childComplexity), true + + case "Project.isArchived": + if e.complexity.Project.IsArchived == nil { + break + } + + return e.complexity.Project.IsArchived(childComplexity), true + + case "Project.isBasicAuthActive": + if e.complexity.Project.IsBasicAuthActive == nil { + break + } + + return e.complexity.Project.IsBasicAuthActive(childComplexity), true + + case "Project.name": + if e.complexity.Project.Name == nil { + break + } + + return e.complexity.Project.Name(childComplexity), true + + case "Project.publicDescription": + if e.complexity.Project.PublicDescription == nil { + break + } + + return e.complexity.Project.PublicDescription(childComplexity), true + + case "Project.publicImage": + if e.complexity.Project.PublicImage == nil { + break + } + + return e.complexity.Project.PublicImage(childComplexity), true + + case "Project.publicNoIndex": + if e.complexity.Project.PublicNoIndex == nil { + break + } + + return e.complexity.Project.PublicNoIndex(childComplexity), true + + case "Project.publicTitle": + if e.complexity.Project.PublicTitle == nil { + break + } + + return e.complexity.Project.PublicTitle(childComplexity), true + + case "Project.publishedAt": + if e.complexity.Project.PublishedAt == nil { + break + } + + return e.complexity.Project.PublishedAt(childComplexity), true + + case "Project.publishmentStatus": + if e.complexity.Project.PublishmentStatus == nil { + break + } + + return 
e.complexity.Project.PublishmentStatus(childComplexity), true + + case "Project.scene": + if e.complexity.Project.Scene == nil { + break + } + + return e.complexity.Project.Scene(childComplexity), true + + case "Project.team": + if e.complexity.Project.Team == nil { + break + } + + return e.complexity.Project.Team(childComplexity), true + + case "Project.teamId": + if e.complexity.Project.TeamID == nil { + break + } + + return e.complexity.Project.TeamID(childComplexity), true + + case "Project.updatedAt": + if e.complexity.Project.UpdatedAt == nil { + break + } + + return e.complexity.Project.UpdatedAt(childComplexity), true + + case "Project.visualizer": + if e.complexity.Project.Visualizer == nil { + break + } + + return e.complexity.Project.Visualizer(childComplexity), true + + case "ProjectAliasAvailability.alias": + if e.complexity.ProjectAliasAvailability.Alias == nil { + break + } + + return e.complexity.ProjectAliasAvailability.Alias(childComplexity), true + + case "ProjectAliasAvailability.available": + if e.complexity.ProjectAliasAvailability.Available == nil { + break + } + + return e.complexity.ProjectAliasAvailability.Available(childComplexity), true + + case "ProjectConnection.edges": + if e.complexity.ProjectConnection.Edges == nil { + break + } + + return e.complexity.ProjectConnection.Edges(childComplexity), true + + case "ProjectConnection.nodes": + if e.complexity.ProjectConnection.Nodes == nil { + break + } + + return e.complexity.ProjectConnection.Nodes(childComplexity), true + + case "ProjectConnection.pageInfo": + if e.complexity.ProjectConnection.PageInfo == nil { + break + } + + return e.complexity.ProjectConnection.PageInfo(childComplexity), true + + case "ProjectConnection.totalCount": + if e.complexity.ProjectConnection.TotalCount == nil { + break + } + + return e.complexity.ProjectConnection.TotalCount(childComplexity), true + + case "ProjectEdge.cursor": + if e.complexity.ProjectEdge.Cursor == nil { + break + } + + return 
e.complexity.ProjectEdge.Cursor(childComplexity), true + + case "ProjectEdge.node": + if e.complexity.ProjectEdge.Node == nil { + break + } + + return e.complexity.ProjectEdge.Node(childComplexity), true + + case "ProjectPayload.project": + if e.complexity.ProjectPayload.Project == nil { + break + } + + return e.complexity.ProjectPayload.Project(childComplexity), true + + case "Property.id": + if e.complexity.Property.ID == nil { + break + } + + return e.complexity.Property.ID(childComplexity), true + + case "Property.items": + if e.complexity.Property.Items == nil { + break + } + + return e.complexity.Property.Items(childComplexity), true + + case "Property.layer": + if e.complexity.Property.Layer == nil { + break + } + + return e.complexity.Property.Layer(childComplexity), true + + case "Property.merged": + if e.complexity.Property.Merged == nil { + break + } + + return e.complexity.Property.Merged(childComplexity), true + + case "Property.schema": + if e.complexity.Property.Schema == nil { + break + } + + return e.complexity.Property.Schema(childComplexity), true + + case "Property.schemaId": + if e.complexity.Property.SchemaID == nil { + break + } + + return e.complexity.Property.SchemaID(childComplexity), true + + case "PropertyCondition.fieldId": + if e.complexity.PropertyCondition.FieldID == nil { + break + } + + return e.complexity.PropertyCondition.FieldID(childComplexity), true + + case "PropertyCondition.type": + if e.complexity.PropertyCondition.Type == nil { + break + } + + return e.complexity.PropertyCondition.Type(childComplexity), true + + case "PropertyCondition.value": + if e.complexity.PropertyCondition.Value == nil { + break + } + + return e.complexity.PropertyCondition.Value(childComplexity), true + + case "PropertyField.actualValue": + if e.complexity.PropertyField.ActualValue == nil { + break + } + + return e.complexity.PropertyField.ActualValue(childComplexity), true + + case "PropertyField.field": + if e.complexity.PropertyField.Field == 
nil { + break + } + + return e.complexity.PropertyField.Field(childComplexity), true + + case "PropertyField.fieldId": + if e.complexity.PropertyField.FieldID == nil { + break + } + + return e.complexity.PropertyField.FieldID(childComplexity), true + + case "PropertyField.id": + if e.complexity.PropertyField.ID == nil { + break + } + + return e.complexity.PropertyField.ID(childComplexity), true + + case "PropertyField.links": + if e.complexity.PropertyField.Links == nil { + break + } + + return e.complexity.PropertyField.Links(childComplexity), true + + case "PropertyField.parent": + if e.complexity.PropertyField.Parent == nil { + break + } + + return e.complexity.PropertyField.Parent(childComplexity), true + + case "PropertyField.parentId": + if e.complexity.PropertyField.ParentID == nil { + break + } + + return e.complexity.PropertyField.ParentID(childComplexity), true + + case "PropertyField.schema": + if e.complexity.PropertyField.Schema == nil { + break + } + + return e.complexity.PropertyField.Schema(childComplexity), true + + case "PropertyField.schemaId": + if e.complexity.PropertyField.SchemaID == nil { + break + } + + return e.complexity.PropertyField.SchemaID(childComplexity), true + + case "PropertyField.type": + if e.complexity.PropertyField.Type == nil { + break + } + + return e.complexity.PropertyField.Type(childComplexity), true + + case "PropertyField.value": + if e.complexity.PropertyField.Value == nil { + break + } + + return e.complexity.PropertyField.Value(childComplexity), true + + case "PropertyFieldLink.dataset": + if e.complexity.PropertyFieldLink.Dataset == nil { + break + } + + return e.complexity.PropertyFieldLink.Dataset(childComplexity), true + + case "PropertyFieldLink.datasetField": + if e.complexity.PropertyFieldLink.DatasetField == nil { + break + } + + return e.complexity.PropertyFieldLink.DatasetField(childComplexity), true + + case "PropertyFieldLink.datasetId": + if e.complexity.PropertyFieldLink.DatasetID == nil { + break + } 
+ + return e.complexity.PropertyFieldLink.DatasetID(childComplexity), true + + case "PropertyFieldLink.datasetSchema": + if e.complexity.PropertyFieldLink.DatasetSchema == nil { + break + } + + return e.complexity.PropertyFieldLink.DatasetSchema(childComplexity), true + + case "PropertyFieldLink.datasetSchemaField": + if e.complexity.PropertyFieldLink.DatasetSchemaField == nil { + break + } + + return e.complexity.PropertyFieldLink.DatasetSchemaField(childComplexity), true + + case "PropertyFieldLink.datasetSchemaFieldId": + if e.complexity.PropertyFieldLink.DatasetSchemaFieldID == nil { + break + } + + return e.complexity.PropertyFieldLink.DatasetSchemaFieldID(childComplexity), true + + case "PropertyFieldLink.datasetSchemaId": + if e.complexity.PropertyFieldLink.DatasetSchemaID == nil { + break + } + + return e.complexity.PropertyFieldLink.DatasetSchemaID(childComplexity), true + + case "PropertyFieldPayload.property": + if e.complexity.PropertyFieldPayload.Property == nil { + break + } + + return e.complexity.PropertyFieldPayload.Property(childComplexity), true + + case "PropertyFieldPayload.propertyField": + if e.complexity.PropertyFieldPayload.PropertyField == nil { + break + } + + return e.complexity.PropertyFieldPayload.PropertyField(childComplexity), true + + case "PropertyGroup.fields": + if e.complexity.PropertyGroup.Fields == nil { + break + } + + return e.complexity.PropertyGroup.Fields(childComplexity), true + + case "PropertyGroup.id": + if e.complexity.PropertyGroup.ID == nil { + break + } + + return e.complexity.PropertyGroup.ID(childComplexity), true + + case "PropertyGroup.schema": + if e.complexity.PropertyGroup.Schema == nil { + break + } + + return e.complexity.PropertyGroup.Schema(childComplexity), true + + case "PropertyGroup.schemaGroup": + if e.complexity.PropertyGroup.SchemaGroup == nil { + break + } + + return e.complexity.PropertyGroup.SchemaGroup(childComplexity), true + + case "PropertyGroup.schemaGroupId": + if 
e.complexity.PropertyGroup.SchemaGroupID == nil { + break + } + + return e.complexity.PropertyGroup.SchemaGroupID(childComplexity), true + + case "PropertyGroup.schemaId": + if e.complexity.PropertyGroup.SchemaID == nil { + break + } + + return e.complexity.PropertyGroup.SchemaID(childComplexity), true + + case "PropertyGroupList.groups": + if e.complexity.PropertyGroupList.Groups == nil { + break + } + + return e.complexity.PropertyGroupList.Groups(childComplexity), true + + case "PropertyGroupList.id": + if e.complexity.PropertyGroupList.ID == nil { + break + } + + return e.complexity.PropertyGroupList.ID(childComplexity), true + + case "PropertyGroupList.schema": + if e.complexity.PropertyGroupList.Schema == nil { + break + } + + return e.complexity.PropertyGroupList.Schema(childComplexity), true + + case "PropertyGroupList.schemaGroup": + if e.complexity.PropertyGroupList.SchemaGroup == nil { + break + } + + return e.complexity.PropertyGroupList.SchemaGroup(childComplexity), true + + case "PropertyGroupList.schemaGroupId": + if e.complexity.PropertyGroupList.SchemaGroupID == nil { + break + } + + return e.complexity.PropertyGroupList.SchemaGroupID(childComplexity), true + + case "PropertyGroupList.schemaId": + if e.complexity.PropertyGroupList.SchemaID == nil { + break + } + + return e.complexity.PropertyGroupList.SchemaID(childComplexity), true + + case "PropertyItemPayload.property": + if e.complexity.PropertyItemPayload.Property == nil { + break + } + + return e.complexity.PropertyItemPayload.Property(childComplexity), true + + case "PropertyItemPayload.propertyItem": + if e.complexity.PropertyItemPayload.PropertyItem == nil { + break + } + + return e.complexity.PropertyItemPayload.PropertyItem(childComplexity), true + + case "PropertyLinkableFields.latlng": + if e.complexity.PropertyLinkableFields.Latlng == nil { + break + } + + return e.complexity.PropertyLinkableFields.Latlng(childComplexity), true + + case "PropertyLinkableFields.latlngField": + if 
e.complexity.PropertyLinkableFields.LatlngField == nil { + break + } + + return e.complexity.PropertyLinkableFields.LatlngField(childComplexity), true + + case "PropertyLinkableFields.schema": + if e.complexity.PropertyLinkableFields.Schema == nil { + break + } + + return e.complexity.PropertyLinkableFields.Schema(childComplexity), true + + case "PropertyLinkableFields.schemaId": + if e.complexity.PropertyLinkableFields.SchemaID == nil { + break + } + + return e.complexity.PropertyLinkableFields.SchemaID(childComplexity), true + + case "PropertyLinkableFields.url": + if e.complexity.PropertyLinkableFields.URL == nil { + break + } + + return e.complexity.PropertyLinkableFields.URL(childComplexity), true + + case "PropertyLinkableFields.urlField": + if e.complexity.PropertyLinkableFields.URLField == nil { + break + } + + return e.complexity.PropertyLinkableFields.URLField(childComplexity), true + + case "PropertySchema.groups": + if e.complexity.PropertySchema.Groups == nil { + break + } + + return e.complexity.PropertySchema.Groups(childComplexity), true + + case "PropertySchema.id": + if e.complexity.PropertySchema.ID == nil { + break + } + + return e.complexity.PropertySchema.ID(childComplexity), true + + case "PropertySchema.linkableFields": + if e.complexity.PropertySchema.LinkableFields == nil { + break + } + + return e.complexity.PropertySchema.LinkableFields(childComplexity), true + + case "PropertySchemaField.allTranslatedDescription": + if e.complexity.PropertySchemaField.AllTranslatedDescription == nil { + break + } + + return e.complexity.PropertySchemaField.AllTranslatedDescription(childComplexity), true + + case "PropertySchemaField.allTranslatedTitle": + if e.complexity.PropertySchemaField.AllTranslatedTitle == nil { + break + } + + return e.complexity.PropertySchemaField.AllTranslatedTitle(childComplexity), true + + case "PropertySchemaField.choices": + if e.complexity.PropertySchemaField.Choices == nil { + break + } + + return 
e.complexity.PropertySchemaField.Choices(childComplexity), true + + case "PropertySchemaField.defaultValue": + if e.complexity.PropertySchemaField.DefaultValue == nil { + break + } + + return e.complexity.PropertySchemaField.DefaultValue(childComplexity), true + + case "PropertySchemaField.description": + if e.complexity.PropertySchemaField.Description == nil { + break + } + + return e.complexity.PropertySchemaField.Description(childComplexity), true + + case "PropertySchemaField.fieldId": + if e.complexity.PropertySchemaField.FieldID == nil { + break + } + + return e.complexity.PropertySchemaField.FieldID(childComplexity), true + + case "PropertySchemaField.isAvailableIf": + if e.complexity.PropertySchemaField.IsAvailableIf == nil { + break + } + + return e.complexity.PropertySchemaField.IsAvailableIf(childComplexity), true + + case "PropertySchemaField.max": + if e.complexity.PropertySchemaField.Max == nil { + break + } + + return e.complexity.PropertySchemaField.Max(childComplexity), true + + case "PropertySchemaField.min": + if e.complexity.PropertySchemaField.Min == nil { + break + } + + return e.complexity.PropertySchemaField.Min(childComplexity), true + + case "PropertySchemaField.prefix": + if e.complexity.PropertySchemaField.Prefix == nil { + break + } + + return e.complexity.PropertySchemaField.Prefix(childComplexity), true + + case "PropertySchemaField.suffix": + if e.complexity.PropertySchemaField.Suffix == nil { + break + } + + return e.complexity.PropertySchemaField.Suffix(childComplexity), true + + case "PropertySchemaField.title": + if e.complexity.PropertySchemaField.Title == nil { + break + } + + return e.complexity.PropertySchemaField.Title(childComplexity), true + + case "PropertySchemaField.translatedDescription": + if e.complexity.PropertySchemaField.TranslatedDescription == nil { + break + } + + args, err := ec.field_PropertySchemaField_translatedDescription_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return 
e.complexity.PropertySchemaField.TranslatedDescription(childComplexity, args["lang"].(*language.Tag)), true + + case "PropertySchemaField.translatedTitle": + if e.complexity.PropertySchemaField.TranslatedTitle == nil { + break + } + + args, err := ec.field_PropertySchemaField_translatedTitle_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.PropertySchemaField.TranslatedTitle(childComplexity, args["lang"].(*language.Tag)), true + + case "PropertySchemaField.type": + if e.complexity.PropertySchemaField.Type == nil { + break + } + + return e.complexity.PropertySchemaField.Type(childComplexity), true + + case "PropertySchemaField.ui": + if e.complexity.PropertySchemaField.UI == nil { + break + } + + return e.complexity.PropertySchemaField.UI(childComplexity), true + + case "PropertySchemaFieldChoice.allTranslatedTitle": + if e.complexity.PropertySchemaFieldChoice.AllTranslatedTitle == nil { + break + } + + return e.complexity.PropertySchemaFieldChoice.AllTranslatedTitle(childComplexity), true + + case "PropertySchemaFieldChoice.icon": + if e.complexity.PropertySchemaFieldChoice.Icon == nil { + break + } + + return e.complexity.PropertySchemaFieldChoice.Icon(childComplexity), true + + case "PropertySchemaFieldChoice.key": + if e.complexity.PropertySchemaFieldChoice.Key == nil { + break + } + + return e.complexity.PropertySchemaFieldChoice.Key(childComplexity), true + + case "PropertySchemaFieldChoice.title": + if e.complexity.PropertySchemaFieldChoice.Title == nil { + break + } + + return e.complexity.PropertySchemaFieldChoice.Title(childComplexity), true + + case "PropertySchemaFieldChoice.translatedTitle": + if e.complexity.PropertySchemaFieldChoice.TranslatedTitle == nil { + break + } + + args, err := ec.field_PropertySchemaFieldChoice_translatedTitle_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.PropertySchemaFieldChoice.TranslatedTitle(childComplexity, 
args["lang"].(*language.Tag)), true + + case "PropertySchemaGroup.allTranslatedTitle": + if e.complexity.PropertySchemaGroup.AllTranslatedTitle == nil { + break + } + + return e.complexity.PropertySchemaGroup.AllTranslatedTitle(childComplexity), true + + case "PropertySchemaGroup.fields": + if e.complexity.PropertySchemaGroup.Fields == nil { + break + } + + return e.complexity.PropertySchemaGroup.Fields(childComplexity), true + + case "PropertySchemaGroup.isAvailableIf": + if e.complexity.PropertySchemaGroup.IsAvailableIf == nil { + break + } + + return e.complexity.PropertySchemaGroup.IsAvailableIf(childComplexity), true + + case "PropertySchemaGroup.isList": + if e.complexity.PropertySchemaGroup.IsList == nil { + break + } + + return e.complexity.PropertySchemaGroup.IsList(childComplexity), true + + case "PropertySchemaGroup.representativeField": + if e.complexity.PropertySchemaGroup.RepresentativeField == nil { + break + } + + return e.complexity.PropertySchemaGroup.RepresentativeField(childComplexity), true + + case "PropertySchemaGroup.representativeFieldId": + if e.complexity.PropertySchemaGroup.RepresentativeFieldID == nil { + break + } + + return e.complexity.PropertySchemaGroup.RepresentativeFieldID(childComplexity), true + + case "PropertySchemaGroup.schema": + if e.complexity.PropertySchemaGroup.Schema == nil { + break + } + + return e.complexity.PropertySchemaGroup.Schema(childComplexity), true + + case "PropertySchemaGroup.schemaGroupId": + if e.complexity.PropertySchemaGroup.SchemaGroupID == nil { + break + } + + return e.complexity.PropertySchemaGroup.SchemaGroupID(childComplexity), true + + case "PropertySchemaGroup.schemaId": + if e.complexity.PropertySchemaGroup.SchemaID == nil { + break + } + + return e.complexity.PropertySchemaGroup.SchemaID(childComplexity), true + + case "PropertySchemaGroup.title": + if e.complexity.PropertySchemaGroup.Title == nil { + break + } + + return e.complexity.PropertySchemaGroup.Title(childComplexity), true + + case 
"PropertySchemaGroup.translatedTitle": + if e.complexity.PropertySchemaGroup.TranslatedTitle == nil { + break + } + + args, err := ec.field_PropertySchemaGroup_translatedTitle_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.PropertySchemaGroup.TranslatedTitle(childComplexity, args["lang"].(*language.Tag)), true + + case "Query.assets": + if e.complexity.Query.Assets == nil { + break + } + + args, err := ec.field_Query_assets_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Assets(childComplexity, args["teamId"].(gqlmodel.ID), args["keyword"].(*string), args["sort"].(*gqlmodel.AssetSortType), args["pagination"].(*gqlmodel.Pagination)), true + + case "Query.checkProjectAlias": + if e.complexity.Query.CheckProjectAlias == nil { + break + } + + args, err := ec.field_Query_checkProjectAlias_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.CheckProjectAlias(childComplexity, args["alias"].(string)), true + + case "Query.datasetSchemas": + if e.complexity.Query.DatasetSchemas == nil { + break + } + + args, err := ec.field_Query_datasetSchemas_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.DatasetSchemas(childComplexity, args["sceneId"].(gqlmodel.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + + case "Query.datasets": + if e.complexity.Query.Datasets == nil { + break + } + + args, err := ec.field_Query_datasets_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Datasets(childComplexity, args["datasetSchemaId"].(gqlmodel.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + + case "Query.dynamicDatasetSchemas": + if e.complexity.Query.DynamicDatasetSchemas == nil { + break + } + + args, 
err := ec.field_Query_dynamicDatasetSchemas_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.DynamicDatasetSchemas(childComplexity, args["sceneId"].(gqlmodel.ID)), true + + case "Query.layer": + if e.complexity.Query.Layer == nil { + break + } + + args, err := ec.field_Query_layer_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Layer(childComplexity, args["id"].(gqlmodel.ID)), true + + case "Query.me": + if e.complexity.Query.Me == nil { + break + } + + return e.complexity.Query.Me(childComplexity), true + + case "Query.node": + if e.complexity.Query.Node == nil { + break + } + + args, err := ec.field_Query_node_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Node(childComplexity, args["id"].(gqlmodel.ID), args["type"].(gqlmodel.NodeType)), true + + case "Query.nodes": + if e.complexity.Query.Nodes == nil { + break + } + + args, err := ec.field_Query_nodes_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Nodes(childComplexity, args["id"].([]gqlmodel.ID), args["type"].(gqlmodel.NodeType)), true + + case "Query.plugin": + if e.complexity.Query.Plugin == nil { + break + } + + args, err := ec.field_Query_plugin_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Plugin(childComplexity, args["id"].(gqlmodel.ID)), true + + case "Query.plugins": + if e.complexity.Query.Plugins == nil { + break + } + + args, err := ec.field_Query_plugins_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Plugins(childComplexity, args["id"].([]gqlmodel.ID)), true + + case "Query.projects": + if e.complexity.Query.Projects == nil { + break + } + + args, err := ec.field_Query_projects_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return 
e.complexity.Query.Projects(childComplexity, args["teamId"].(gqlmodel.ID), args["includeArchived"].(*bool), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + + case "Query.propertySchema": + if e.complexity.Query.PropertySchema == nil { + break + } + + args, err := ec.field_Query_propertySchema_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.PropertySchema(childComplexity, args["id"].(gqlmodel.ID)), true + + case "Query.propertySchemas": + if e.complexity.Query.PropertySchemas == nil { + break + } + + args, err := ec.field_Query_propertySchemas_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.PropertySchemas(childComplexity, args["id"].([]gqlmodel.ID)), true + + case "Query.scene": + if e.complexity.Query.Scene == nil { + break + } + + args, err := ec.field_Query_scene_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Scene(childComplexity, args["projectId"].(gqlmodel.ID)), true + + case "Query.searchUser": + if e.complexity.Query.SearchUser == nil { + break + } + + args, err := ec.field_Query_searchUser_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.SearchUser(childComplexity, args["nameOrEmail"].(string)), true + + case "Rect.east": + if e.complexity.Rect.East == nil { + break + } + + return e.complexity.Rect.East(childComplexity), true + + case "Rect.north": + if e.complexity.Rect.North == nil { + break + } + + return e.complexity.Rect.North(childComplexity), true + + case "Rect.south": + if e.complexity.Rect.South == nil { + break + } + + return e.complexity.Rect.South(childComplexity), true + + case "Rect.west": + if e.complexity.Rect.West == nil { + break + } + + return e.complexity.Rect.West(childComplexity), true + + case "RemoveAssetPayload.assetId": + if 
e.complexity.RemoveAssetPayload.AssetID == nil { + break + } + + return e.complexity.RemoveAssetPayload.AssetID(childComplexity), true + + case "RemoveClusterPayload.clusterId": + if e.complexity.RemoveClusterPayload.ClusterID == nil { + break + } + + return e.complexity.RemoveClusterPayload.ClusterID(childComplexity), true + + case "RemoveClusterPayload.scene": + if e.complexity.RemoveClusterPayload.Scene == nil { + break + } + + return e.complexity.RemoveClusterPayload.Scene(childComplexity), true + + case "RemoveDatasetSchemaPayload.schemaId": + if e.complexity.RemoveDatasetSchemaPayload.SchemaID == nil { + break + } + + return e.complexity.RemoveDatasetSchemaPayload.SchemaID(childComplexity), true + + case "RemoveInfoboxFieldPayload.infoboxFieldId": + if e.complexity.RemoveInfoboxFieldPayload.InfoboxFieldID == nil { + break + } + + return e.complexity.RemoveInfoboxFieldPayload.InfoboxFieldID(childComplexity), true + + case "RemoveInfoboxFieldPayload.layer": + if e.complexity.RemoveInfoboxFieldPayload.Layer == nil { + break + } + + return e.complexity.RemoveInfoboxFieldPayload.Layer(childComplexity), true + + case "RemoveInfoboxPayload.layer": + if e.complexity.RemoveInfoboxPayload.Layer == nil { + break + } + + return e.complexity.RemoveInfoboxPayload.Layer(childComplexity), true + + case "RemoveLayerPayload.layerId": + if e.complexity.RemoveLayerPayload.LayerID == nil { + break + } + + return e.complexity.RemoveLayerPayload.LayerID(childComplexity), true + + case "RemoveLayerPayload.parentLayer": + if e.complexity.RemoveLayerPayload.ParentLayer == nil { + break + } + + return e.complexity.RemoveLayerPayload.ParentLayer(childComplexity), true + + case "RemoveMemberFromTeamPayload.team": + if e.complexity.RemoveMemberFromTeamPayload.Team == nil { + break + } + + return e.complexity.RemoveMemberFromTeamPayload.Team(childComplexity), true + + case "RemoveTagPayload.tagId": + if e.complexity.RemoveTagPayload.TagID == nil { + break + } + + return 
e.complexity.RemoveTagPayload.TagID(childComplexity), true + + case "RemoveTagPayload.updatedLayers": + if e.complexity.RemoveTagPayload.UpdatedLayers == nil { + break + } + + return e.complexity.RemoveTagPayload.UpdatedLayers(childComplexity), true + + case "RemoveWidgetPayload.scene": + if e.complexity.RemoveWidgetPayload.Scene == nil { + break + } + + return e.complexity.RemoveWidgetPayload.Scene(childComplexity), true + + case "RemoveWidgetPayload.widgetId": + if e.complexity.RemoveWidgetPayload.WidgetID == nil { + break + } + + return e.complexity.RemoveWidgetPayload.WidgetID(childComplexity), true + + case "Scene.clusters": + if e.complexity.Scene.Clusters == nil { + break + } + + return e.complexity.Scene.Clusters(childComplexity), true + + case "Scene.createdAt": + if e.complexity.Scene.CreatedAt == nil { + break + } + + return e.complexity.Scene.CreatedAt(childComplexity), true + + case "Scene.datasetSchemas": + if e.complexity.Scene.DatasetSchemas == nil { + break + } + + args, err := ec.field_Scene_datasetSchemas_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Scene.DatasetSchemas(childComplexity, args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + + case "Scene.dynamicDatasetSchemas": + if e.complexity.Scene.DynamicDatasetSchemas == nil { + break + } + + return e.complexity.Scene.DynamicDatasetSchemas(childComplexity), true + + case "Scene.id": + if e.complexity.Scene.ID == nil { + break + } + + return e.complexity.Scene.ID(childComplexity), true + + case "Scene.plugins": + if e.complexity.Scene.Plugins == nil { + break + } + + return e.complexity.Scene.Plugins(childComplexity), true + + case "Scene.project": + if e.complexity.Scene.Project == nil { + break + } + + return e.complexity.Scene.Project(childComplexity), true + + case "Scene.projectId": + if e.complexity.Scene.ProjectID == nil { + break + } + + return 
e.complexity.Scene.ProjectID(childComplexity), true + + case "Scene.property": + if e.complexity.Scene.Property == nil { + break + } + + return e.complexity.Scene.Property(childComplexity), true + + case "Scene.propertyId": + if e.complexity.Scene.PropertyID == nil { + break + } + + return e.complexity.Scene.PropertyID(childComplexity), true + + case "Scene.rootLayer": + if e.complexity.Scene.RootLayer == nil { + break + } + + return e.complexity.Scene.RootLayer(childComplexity), true + + case "Scene.rootLayerId": + if e.complexity.Scene.RootLayerID == nil { + break + } + + return e.complexity.Scene.RootLayerID(childComplexity), true + + case "Scene.tagIds": + if e.complexity.Scene.TagIds == nil { + break + } + + return e.complexity.Scene.TagIds(childComplexity), true + + case "Scene.tags": + if e.complexity.Scene.Tags == nil { + break + } + + return e.complexity.Scene.Tags(childComplexity), true + + case "Scene.team": + if e.complexity.Scene.Team == nil { + break + } + + return e.complexity.Scene.Team(childComplexity), true + + case "Scene.teamId": + if e.complexity.Scene.TeamID == nil { + break + } + + return e.complexity.Scene.TeamID(childComplexity), true + + case "Scene.updatedAt": + if e.complexity.Scene.UpdatedAt == nil { + break + } + + return e.complexity.Scene.UpdatedAt(childComplexity), true + + case "Scene.widgetAlignSystem": + if e.complexity.Scene.WidgetAlignSystem == nil { + break + } + + return e.complexity.Scene.WidgetAlignSystem(childComplexity), true + + case "Scene.widgets": + if e.complexity.Scene.Widgets == nil { + break + } + + return e.complexity.Scene.Widgets(childComplexity), true + + case "ScenePlugin.plugin": + if e.complexity.ScenePlugin.Plugin == nil { + break + } + + return e.complexity.ScenePlugin.Plugin(childComplexity), true + + case "ScenePlugin.pluginId": + if e.complexity.ScenePlugin.PluginID == nil { + break + } + + return e.complexity.ScenePlugin.PluginID(childComplexity), true + + case "ScenePlugin.property": + if 
e.complexity.ScenePlugin.Property == nil { + break + } + + return e.complexity.ScenePlugin.Property(childComplexity), true + + case "ScenePlugin.propertyId": + if e.complexity.ScenePlugin.PropertyID == nil { + break + } + + return e.complexity.ScenePlugin.PropertyID(childComplexity), true + + case "SceneWidget.enabled": + if e.complexity.SceneWidget.Enabled == nil { + break + } + + return e.complexity.SceneWidget.Enabled(childComplexity), true + + case "SceneWidget.extended": + if e.complexity.SceneWidget.Extended == nil { + break + } + + return e.complexity.SceneWidget.Extended(childComplexity), true + + case "SceneWidget.extension": + if e.complexity.SceneWidget.Extension == nil { + break + } + + return e.complexity.SceneWidget.Extension(childComplexity), true + + case "SceneWidget.extensionId": + if e.complexity.SceneWidget.ExtensionID == nil { + break + } + + return e.complexity.SceneWidget.ExtensionID(childComplexity), true + + case "SceneWidget.id": + if e.complexity.SceneWidget.ID == nil { + break + } + + return e.complexity.SceneWidget.ID(childComplexity), true + + case "SceneWidget.plugin": + if e.complexity.SceneWidget.Plugin == nil { + break + } + + return e.complexity.SceneWidget.Plugin(childComplexity), true + + case "SceneWidget.pluginId": + if e.complexity.SceneWidget.PluginID == nil { + break + } + + return e.complexity.SceneWidget.PluginID(childComplexity), true + + case "SceneWidget.property": + if e.complexity.SceneWidget.Property == nil { + break + } + + return e.complexity.SceneWidget.Property(childComplexity), true + + case "SceneWidget.propertyId": + if e.complexity.SceneWidget.PropertyID == nil { + break + } + + return e.complexity.SceneWidget.PropertyID(childComplexity), true + + case "SignupPayload.team": + if e.complexity.SignupPayload.Team == nil { + break + } + + return e.complexity.SignupPayload.Team(childComplexity), true + + case "SignupPayload.user": + if e.complexity.SignupPayload.User == nil { + break + } + + return 
e.complexity.SignupPayload.User(childComplexity), true + + case "SyncDatasetPayload.dataset": + if e.complexity.SyncDatasetPayload.Dataset == nil { + break + } + + return e.complexity.SyncDatasetPayload.Dataset(childComplexity), true + + case "SyncDatasetPayload.datasetSchema": + if e.complexity.SyncDatasetPayload.DatasetSchema == nil { + break + } + + return e.complexity.SyncDatasetPayload.DatasetSchema(childComplexity), true + + case "SyncDatasetPayload.sceneId": + if e.complexity.SyncDatasetPayload.SceneID == nil { + break + } + + return e.complexity.SyncDatasetPayload.SceneID(childComplexity), true + + case "SyncDatasetPayload.url": + if e.complexity.SyncDatasetPayload.URL == nil { + break + } + + return e.complexity.SyncDatasetPayload.URL(childComplexity), true + + case "TagGroup.id": + if e.complexity.TagGroup.ID == nil { + break + } + + return e.complexity.TagGroup.ID(childComplexity), true + + case "TagGroup.label": + if e.complexity.TagGroup.Label == nil { + break + } + + return e.complexity.TagGroup.Label(childComplexity), true + + case "TagGroup.layers": + if e.complexity.TagGroup.Layers == nil { + break + } + + return e.complexity.TagGroup.Layers(childComplexity), true + + case "TagGroup.scene": + if e.complexity.TagGroup.Scene == nil { + break + } + + return e.complexity.TagGroup.Scene(childComplexity), true + + case "TagGroup.sceneId": + if e.complexity.TagGroup.SceneID == nil { + break + } + + return e.complexity.TagGroup.SceneID(childComplexity), true + + case "TagGroup.tagIds": + if e.complexity.TagGroup.TagIds == nil { + break + } + + return e.complexity.TagGroup.TagIds(childComplexity), true + + case "TagGroup.tags": + if e.complexity.TagGroup.Tags == nil { + break + } + + return e.complexity.TagGroup.Tags(childComplexity), true + + case "TagItem.id": + if e.complexity.TagItem.ID == nil { + break + } + + return e.complexity.TagItem.ID(childComplexity), true + + case "TagItem.label": + if e.complexity.TagItem.Label == nil { + break + } + + return 
e.complexity.TagItem.Label(childComplexity), true + + case "TagItem.layers": + if e.complexity.TagItem.Layers == nil { + break + } + + return e.complexity.TagItem.Layers(childComplexity), true + + case "TagItem.linkedDataset": + if e.complexity.TagItem.LinkedDataset == nil { + break + } + + return e.complexity.TagItem.LinkedDataset(childComplexity), true + + case "TagItem.linkedDatasetField": + if e.complexity.TagItem.LinkedDatasetField == nil { + break + } + + return e.complexity.TagItem.LinkedDatasetField(childComplexity), true + + case "TagItem.linkedDatasetFieldID": + if e.complexity.TagItem.LinkedDatasetFieldID == nil { + break + } + + return e.complexity.TagItem.LinkedDatasetFieldID(childComplexity), true + + case "TagItem.linkedDatasetID": + if e.complexity.TagItem.LinkedDatasetID == nil { + break + } + + return e.complexity.TagItem.LinkedDatasetID(childComplexity), true + + case "TagItem.linkedDatasetSchema": + if e.complexity.TagItem.LinkedDatasetSchema == nil { + break + } + + return e.complexity.TagItem.LinkedDatasetSchema(childComplexity), true + + case "TagItem.linkedDatasetSchemaID": + if e.complexity.TagItem.LinkedDatasetSchemaID == nil { + break + } + + return e.complexity.TagItem.LinkedDatasetSchemaID(childComplexity), true + + case "TagItem.parent": + if e.complexity.TagItem.Parent == nil { + break + } + + return e.complexity.TagItem.Parent(childComplexity), true + + case "TagItem.parentId": + if e.complexity.TagItem.ParentID == nil { + break + } + + return e.complexity.TagItem.ParentID(childComplexity), true + + case "TagItem.sceneId": + if e.complexity.TagItem.SceneID == nil { + break + } + + return e.complexity.TagItem.SceneID(childComplexity), true + + case "Team.assets": + if e.complexity.Team.Assets == nil { + break + } + + args, err := ec.field_Team_assets_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Team.Assets(childComplexity, args["first"].(*int), args["last"].(*int), 
args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + + case "Team.id": + if e.complexity.Team.ID == nil { + break + } + + return e.complexity.Team.ID(childComplexity), true + + case "Team.members": + if e.complexity.Team.Members == nil { + break + } + + return e.complexity.Team.Members(childComplexity), true + + case "Team.name": + if e.complexity.Team.Name == nil { + break + } + + return e.complexity.Team.Name(childComplexity), true + + case "Team.personal": + if e.complexity.Team.Personal == nil { + break + } + + return e.complexity.Team.Personal(childComplexity), true + + case "Team.projects": + if e.complexity.Team.Projects == nil { + break + } + + args, err := ec.field_Team_projects_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Team.Projects(childComplexity, args["includeArchived"].(*bool), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + + case "TeamMember.role": + if e.complexity.TeamMember.Role == nil { + break + } + + return e.complexity.TeamMember.Role(childComplexity), true + + case "TeamMember.user": + if e.complexity.TeamMember.User == nil { + break + } + + return e.complexity.TeamMember.User(childComplexity), true + + case "TeamMember.userId": + if e.complexity.TeamMember.UserID == nil { + break + } + + return e.complexity.TeamMember.UserID(childComplexity), true + + case "Typography.bold": + if e.complexity.Typography.Bold == nil { + break + } + + return e.complexity.Typography.Bold(childComplexity), true + + case "Typography.color": + if e.complexity.Typography.Color == nil { + break + } + + return e.complexity.Typography.Color(childComplexity), true + + case "Typography.fontFamily": + if e.complexity.Typography.FontFamily == nil { + break + } + + return e.complexity.Typography.FontFamily(childComplexity), true + + case "Typography.fontSize": + if e.complexity.Typography.FontSize == nil { + break + } + + return 
e.complexity.Typography.FontSize(childComplexity), true + + case "Typography.fontWeight": + if e.complexity.Typography.FontWeight == nil { + break + } + + return e.complexity.Typography.FontWeight(childComplexity), true + + case "Typography.italic": + if e.complexity.Typography.Italic == nil { + break + } + + return e.complexity.Typography.Italic(childComplexity), true + + case "Typography.textAlign": + if e.complexity.Typography.TextAlign == nil { + break + } + + return e.complexity.Typography.TextAlign(childComplexity), true + + case "Typography.underline": + if e.complexity.Typography.Underline == nil { + break + } + + return e.complexity.Typography.Underline(childComplexity), true + + case "UninstallPluginPayload.pluginId": + if e.complexity.UninstallPluginPayload.PluginID == nil { + break + } + + return e.complexity.UninstallPluginPayload.PluginID(childComplexity), true + + case "UninstallPluginPayload.scene": + if e.complexity.UninstallPluginPayload.Scene == nil { + break + } + + return e.complexity.UninstallPluginPayload.Scene(childComplexity), true + + case "UpdateClusterPayload.cluster": + if e.complexity.UpdateClusterPayload.Cluster == nil { + break + } + + return e.complexity.UpdateClusterPayload.Cluster(childComplexity), true + + case "UpdateClusterPayload.scene": + if e.complexity.UpdateClusterPayload.Scene == nil { + break + } + + return e.complexity.UpdateClusterPayload.Scene(childComplexity), true + + case "UpdateDatasetSchemaPayload.datasetSchema": + if e.complexity.UpdateDatasetSchemaPayload.DatasetSchema == nil { + break + } + + return e.complexity.UpdateDatasetSchemaPayload.DatasetSchema(childComplexity), true + + case "UpdateLayerPayload.layer": + if e.complexity.UpdateLayerPayload.Layer == nil { + break + } + + return e.complexity.UpdateLayerPayload.Layer(childComplexity), true + + case "UpdateMePayload.me": + if e.complexity.UpdateMePayload.Me == nil { + break + } + + return e.complexity.UpdateMePayload.Me(childComplexity), true + + case 
"UpdateMemberOfTeamPayload.team": + if e.complexity.UpdateMemberOfTeamPayload.Team == nil { + break + } + + return e.complexity.UpdateMemberOfTeamPayload.Team(childComplexity), true + + case "UpdateTagPayload.tag": + if e.complexity.UpdateTagPayload.Tag == nil { + break + } + + return e.complexity.UpdateTagPayload.Tag(childComplexity), true + + case "UpdateTeamPayload.team": + if e.complexity.UpdateTeamPayload.Team == nil { + break + } + + return e.complexity.UpdateTeamPayload.Team(childComplexity), true + + case "UpdateWidgetAlignSystemPayload.scene": + if e.complexity.UpdateWidgetAlignSystemPayload.Scene == nil { + break + } + + return e.complexity.UpdateWidgetAlignSystemPayload.Scene(childComplexity), true + + case "UpdateWidgetPayload.scene": + if e.complexity.UpdateWidgetPayload.Scene == nil { + break + } + + return e.complexity.UpdateWidgetPayload.Scene(childComplexity), true + + case "UpdateWidgetPayload.sceneWidget": + if e.complexity.UpdateWidgetPayload.SceneWidget == nil { + break + } + + return e.complexity.UpdateWidgetPayload.SceneWidget(childComplexity), true + + case "UpgradePluginPayload.scene": + if e.complexity.UpgradePluginPayload.Scene == nil { + break + } + + return e.complexity.UpgradePluginPayload.Scene(childComplexity), true + + case "UpgradePluginPayload.scenePlugin": + if e.complexity.UpgradePluginPayload.ScenePlugin == nil { + break + } + + return e.complexity.UpgradePluginPayload.ScenePlugin(childComplexity), true + + case "UploadPluginPayload.plugin": + if e.complexity.UploadPluginPayload.Plugin == nil { + break + } + + return e.complexity.UploadPluginPayload.Plugin(childComplexity), true + + case "UploadPluginPayload.scene": + if e.complexity.UploadPluginPayload.Scene == nil { + break + } + + return e.complexity.UploadPluginPayload.Scene(childComplexity), true + + case "UploadPluginPayload.scenePlugin": + if e.complexity.UploadPluginPayload.ScenePlugin == nil { + break + } + + return 
e.complexity.UploadPluginPayload.ScenePlugin(childComplexity), true + + case "User.email": + if e.complexity.User.Email == nil { + break + } + + return e.complexity.User.Email(childComplexity), true + + case "User.id": + if e.complexity.User.ID == nil { + break + } + + return e.complexity.User.ID(childComplexity), true + + case "User.name": + if e.complexity.User.Name == nil { + break + } + + return e.complexity.User.Name(childComplexity), true + + case "WidgetAlignSystem.inner": + if e.complexity.WidgetAlignSystem.Inner == nil { + break + } + + return e.complexity.WidgetAlignSystem.Inner(childComplexity), true + + case "WidgetAlignSystem.outer": + if e.complexity.WidgetAlignSystem.Outer == nil { + break + } + + return e.complexity.WidgetAlignSystem.Outer(childComplexity), true + + case "WidgetArea.align": + if e.complexity.WidgetArea.Align == nil { + break + } + + return e.complexity.WidgetArea.Align(childComplexity), true + + case "WidgetArea.widgetIds": + if e.complexity.WidgetArea.WidgetIds == nil { + break + } + + return e.complexity.WidgetArea.WidgetIds(childComplexity), true + + case "WidgetExtendable.horizontally": + if e.complexity.WidgetExtendable.Horizontally == nil { + break + } + + return e.complexity.WidgetExtendable.Horizontally(childComplexity), true + + case "WidgetExtendable.vertically": + if e.complexity.WidgetExtendable.Vertically == nil { + break + } + + return e.complexity.WidgetExtendable.Vertically(childComplexity), true + + case "WidgetLayout.defaultLocation": + if e.complexity.WidgetLayout.DefaultLocation == nil { + break + } + + return e.complexity.WidgetLayout.DefaultLocation(childComplexity), true + + case "WidgetLayout.extendable": + if e.complexity.WidgetLayout.Extendable == nil { + break + } + + return e.complexity.WidgetLayout.Extendable(childComplexity), true + + case "WidgetLayout.extended": + if e.complexity.WidgetLayout.Extended == nil { + break + } + + return e.complexity.WidgetLayout.Extended(childComplexity), true + + case 
"WidgetLayout.floating": + if e.complexity.WidgetLayout.Floating == nil { + break + } + + return e.complexity.WidgetLayout.Floating(childComplexity), true + + case "WidgetLocation.area": + if e.complexity.WidgetLocation.Area == nil { + break + } + + return e.complexity.WidgetLocation.Area(childComplexity), true + + case "WidgetLocation.section": + if e.complexity.WidgetLocation.Section == nil { + break + } + + return e.complexity.WidgetLocation.Section(childComplexity), true + + case "WidgetLocation.zone": + if e.complexity.WidgetLocation.Zone == nil { + break + } + + return e.complexity.WidgetLocation.Zone(childComplexity), true + + case "WidgetSection.bottom": + if e.complexity.WidgetSection.Bottom == nil { + break + } + + return e.complexity.WidgetSection.Bottom(childComplexity), true + + case "WidgetSection.middle": + if e.complexity.WidgetSection.Middle == nil { + break + } + + return e.complexity.WidgetSection.Middle(childComplexity), true + + case "WidgetSection.top": + if e.complexity.WidgetSection.Top == nil { + break + } + + return e.complexity.WidgetSection.Top(childComplexity), true + + case "WidgetZone.center": + if e.complexity.WidgetZone.Center == nil { + break + } + + return e.complexity.WidgetZone.Center(childComplexity), true + + case "WidgetZone.left": + if e.complexity.WidgetZone.Left == nil { + break + } + + return e.complexity.WidgetZone.Left(childComplexity), true + + case "WidgetZone.right": + if e.complexity.WidgetZone.Right == nil { + break + } + + return e.complexity.WidgetZone.Right(childComplexity), true + + } + return 0, false +} + +func (e *executableSchema) Exec(ctx context.Context) graphql.ResponseHandler { + rc := graphql.GetOperationContext(ctx) + ec := executionContext{rc, e} + inputUnmarshalMap := graphql.BuildUnmarshalerMap( + ec.unmarshalInputAddClusterInput, + ec.unmarshalInputAddDatasetSchemaInput, + ec.unmarshalInputAddDynamicDatasetInput, + ec.unmarshalInputAddDynamicDatasetSchemaInput, + 
ec.unmarshalInputAddInfoboxFieldInput, + ec.unmarshalInputAddLayerGroupInput, + ec.unmarshalInputAddLayerItemInput, + ec.unmarshalInputAddMemberToTeamInput, + ec.unmarshalInputAddPropertyItemInput, + ec.unmarshalInputAddWidgetInput, + ec.unmarshalInputAttachTagItemToGroupInput, + ec.unmarshalInputAttachTagToLayerInput, + ec.unmarshalInputCreateAssetInput, + ec.unmarshalInputCreateInfoboxInput, + ec.unmarshalInputCreateProjectInput, + ec.unmarshalInputCreateSceneInput, + ec.unmarshalInputCreateTagGroupInput, + ec.unmarshalInputCreateTagItemInput, + ec.unmarshalInputCreateTeamInput, + ec.unmarshalInputDeleteMeInput, + ec.unmarshalInputDeleteProjectInput, + ec.unmarshalInputDeleteTeamInput, + ec.unmarshalInputDetachTagFromLayerInput, + ec.unmarshalInputDetachTagItemFromGroupInput, + ec.unmarshalInputImportDatasetFromGoogleSheetInput, + ec.unmarshalInputImportDatasetInput, + ec.unmarshalInputImportLayerInput, + ec.unmarshalInputInstallPluginInput, + ec.unmarshalInputLinkDatasetToPropertyValueInput, + ec.unmarshalInputMoveInfoboxFieldInput, + ec.unmarshalInputMoveLayerInput, + ec.unmarshalInputMovePropertyItemInput, + ec.unmarshalInputPagination, + ec.unmarshalInputPublishProjectInput, + ec.unmarshalInputRemoveAssetInput, + ec.unmarshalInputRemoveClusterInput, + ec.unmarshalInputRemoveDatasetSchemaInput, + ec.unmarshalInputRemoveInfoboxFieldInput, + ec.unmarshalInputRemoveInfoboxInput, + ec.unmarshalInputRemoveLayerInput, + ec.unmarshalInputRemoveMemberFromTeamInput, + ec.unmarshalInputRemoveMyAuthInput, + ec.unmarshalInputRemovePropertyFieldInput, + ec.unmarshalInputRemovePropertyItemInput, + ec.unmarshalInputRemoveTagInput, + ec.unmarshalInputRemoveWidgetInput, + ec.unmarshalInputSignupInput, + ec.unmarshalInputSyncDatasetInput, + ec.unmarshalInputUninstallPluginInput, + ec.unmarshalInputUnlinkPropertyValueInput, + ec.unmarshalInputUpdateClusterInput, + ec.unmarshalInputUpdateDatasetSchemaInput, + ec.unmarshalInputUpdateLayerInput, + ec.unmarshalInputUpdateMeInput, + 
ec.unmarshalInputUpdateMemberOfTeamInput, + ec.unmarshalInputUpdateProjectInput, + ec.unmarshalInputUpdatePropertyItemInput, + ec.unmarshalInputUpdatePropertyItemOperationInput, + ec.unmarshalInputUpdatePropertyValueInput, + ec.unmarshalInputUpdateTagInput, + ec.unmarshalInputUpdateTeamInput, + ec.unmarshalInputUpdateWidgetAlignSystemInput, + ec.unmarshalInputUpdateWidgetInput, + ec.unmarshalInputUpgradePluginInput, + ec.unmarshalInputUploadFileToPropertyInput, + ec.unmarshalInputUploadPluginInput, + ec.unmarshalInputWidgetLocationInput, + ) + first := true + + switch rc.Operation.Operation { + case ast.Query: + return func(ctx context.Context) *graphql.Response { + if !first { + return nil + } + first = false + ctx = graphql.WithUnmarshalerMap(ctx, inputUnmarshalMap) + data := ec._Query(ctx, rc.Operation.SelectionSet) + var buf bytes.Buffer + data.MarshalGQL(&buf) + + return &graphql.Response{ + Data: buf.Bytes(), + } + } + case ast.Mutation: + return func(ctx context.Context) *graphql.Response { + if !first { + return nil + } + first = false + ctx = graphql.WithUnmarshalerMap(ctx, inputUnmarshalMap) + data := ec._Mutation(ctx, rc.Operation.SelectionSet) + var buf bytes.Buffer + data.MarshalGQL(&buf) + + return &graphql.Response{ + Data: buf.Bytes(), + } + } + + default: + return graphql.OneShot(graphql.ErrorResponse(ctx, "unsupported GraphQL operation")) + } +} + +type executionContext struct { + *graphql.OperationContext + *executableSchema +} + +func (ec *executionContext) introspectSchema() (*introspection.Schema, error) { + if ec.DisableIntrospection { + return nil, errors.New("introspection disabled") + } + return introspection.WrapSchema(parsedSchema), nil +} + +func (ec *executionContext) introspectType(name string) (*introspection.Type, error) { + if ec.DisableIntrospection { + return nil, errors.New("introspection disabled") + } + return introspection.WrapTypeFromDef(parsedSchema, parsedSchema.Types[name]), nil +} + +var sources = []*ast.Source{ + {Name: 
"schema.graphql", Input: `# Built-in + +scalar Upload +scalar Any + +directive @goModel( + model: String + models: [String!] +) on OBJECT | INPUT_OBJECT | SCALAR | ENUM | INTERFACE | UNION + +directive @goField( + forceResolver: Boolean + name: String +) on INPUT_FIELD_DEFINITION | FIELD_DEFINITION + +# Meta Type + +scalar Cursor + +interface Node { + id: ID! +} + +enum NodeType { + ASSET + USER + TEAM + PROJECT + PLUGIN + SCENE + PROPERTY_SCHEMA + PROPERTY + DATASET_SCHEMA + DATASET + LAYER_GROUP + LAYER_ITEM +} + +type PageInfo { + startCursor: Cursor + endCursor: Cursor + hasNextPage: Boolean! + hasPreviousPage: Boolean! +} + +# Basic types + +scalar DateTime +scalar URL +scalar Lang +scalar FileSize +scalar TranslatedString + +type LatLng { + lat: Float! + lng: Float! +} + +type LatLngHeight { + lat: Float! + lng: Float! + height: Float! +} + +type Camera { + lat: Float! + lng: Float! + altitude: Float! + heading: Float! + pitch: Float! + roll: Float! + fov: Float! +} + +type Typography { + fontFamily: String + fontWeight: String + fontSize: Int + color: String + textAlign: TextAlign + bold: Boolean + italic: Boolean + underline: Boolean +} + +type Rect { + west: Float! + south: Float! + east: Float! + north: Float! +} + +input Pagination{ + first: Int + last: Int + after: Cursor + before: Cursor +} + +enum TextAlign { + LEFT + CENTER + RIGHT + JUSTIFY + JUSTIFY_ALL +} + +enum ValueType { + BOOL + NUMBER + STRING + REF + URL + LATLNG + LATLNGHEIGHT + CAMERA + TYPOGRAPHY + COORDINATES + POLYGON + RECT +} + +enum ListOperation { + ADD + MOVE + REMOVE +} + +enum Theme { + DEFAULT + LIGHT + DARK +} + +# Asset + +type Asset implements Node { + id: ID! + createdAt: DateTime! + teamId: ID! + name: String! + size: FileSize! + url: String! + contentType: String! + team: Team @goField(forceResolver: true) +} + +enum AssetSortType { + DATE + SIZE + NAME +} + +# User + +type User implements Node { + id: ID! + name: String! + email: String! +} + +type Me { + id: ID! 
+ name: String! + email: String! + lang: Lang! + theme: Theme! + myTeamId: ID! + auths: [String!]! + teams: [Team!]! @goField(forceResolver: true) + myTeam: Team! @goField(forceResolver: true) +} + +type ProjectAliasAvailability { + alias: String! + available: Boolean! +} + +type Team implements Node { + id: ID! + name: String! + members: [TeamMember!]! + personal: Boolean! + assets( + first: Int + last: Int + after: Cursor + before: Cursor + ): AssetConnection! @goField(forceResolver: true) + projects( + includeArchived: Boolean + first: Int + last: Int + after: Cursor + before: Cursor + ): ProjectConnection! @goField(forceResolver: true) +} + +type TeamMember { + userId: ID! + role: Role! + user: User @goField(forceResolver: true) +} + +enum Role { + # a role who can read project + READER + # a role who can read and write project + WRITER + # a eole who can have full controll of project + OWNER +} + +# Project + +type Project implements Node { + id: ID! + isArchived: Boolean! + isBasicAuthActive: Boolean! + basicAuthUsername: String! + basicAuthPassword: String! + createdAt: DateTime! + updatedAt: DateTime! + publishedAt: DateTime + name: String! + description: String! + alias: String! + publicTitle: String! + publicDescription: String! + publicImage: String! + publicNoIndex: Boolean! + imageUrl: URL + teamId: ID! + visualizer: Visualizer! + publishmentStatus: PublishmentStatus! + team: Team @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) +} + +enum Visualizer { + CESIUM +} + +enum PublishmentStatus { + PUBLIC + LIMITED + PRIVATE +} + +# Plugin + +type Plugin { + id: ID! + sceneId: ID + name: String! + version: String! + description: String! + author: String! + repositoryUrl: String! + propertySchemaId: ID + extensions: [PluginExtension!]! 
+ scenePlugin(sceneId: ID): ScenePlugin + allTranslatedDescription: TranslatedString + allTranslatedName: TranslatedString + scene: Scene @goField(forceResolver: true) + translatedName(lang: Lang): String! @goField(forceResolver: true) + translatedDescription(lang: Lang): String! @goField(forceResolver: true) + propertySchema: PropertySchema @goField(forceResolver: true) +} + +enum WidgetAreaAlign { + START + CENTERED + END +} + +enum WidgetZoneType { + INNER + OUTER +} + +enum WidgetSectionType { + LEFT + CENTER + RIGHT +} + +enum WidgetAreaType { + TOP + MIDDLE + BOTTOM +} + +type WidgetLocation { + zone: WidgetZoneType! + section: WidgetSectionType! + area: WidgetAreaType! +} + +type WidgetExtendable { + vertically: Boolean! + horizontally: Boolean! +} + +type WidgetLayout { + extendable: WidgetExtendable! + extended: Boolean! + floating: Boolean! + defaultLocation: WidgetLocation +} + +enum PluginExtensionType { + PRIMITIVE + WIDGET + BLOCK + VISUALIZER + INFOBOX +} + +type PluginExtension { + extensionId: ID! + pluginId: ID! + type: PluginExtensionType! + name: String! + description: String! + icon: String! + singleOnly: Boolean + widgetLayout: WidgetLayout + visualizer: Visualizer + propertySchemaId: ID! + allTranslatedName: TranslatedString + allTranslatedDescription: TranslatedString + plugin: Plugin @goField(forceResolver: true) + sceneWidget(sceneId: ID!): SceneWidget @goField(forceResolver: true) + propertySchema: PropertySchema @goField(forceResolver: true) + translatedName(lang: Lang): String! @goField(forceResolver: true) + translatedDescription(lang: Lang): String! @goField(forceResolver: true) +} + +# Scene + +type Scene implements Node { + id: ID! + projectId: ID! + teamId: ID! + propertyId: ID! + createdAt: DateTime! + updatedAt: DateTime! + rootLayerId: ID! + widgets: [SceneWidget!]! + plugins: [ScenePlugin!]! + widgetAlignSystem: WidgetAlignSystem + dynamicDatasetSchemas: [DatasetSchema!]! 
+ project: Project @goField(forceResolver: true) + team: Team @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + rootLayer: LayerGroup @goField(forceResolver: true) + datasetSchemas( + first: Int + last: Int + after: Cursor + before: Cursor + ): DatasetSchemaConnection! @goField(forceResolver: true) + tagIds: [ID!]! + tags: [Tag!]! @goField(forceResolver: true) + clusters: [Cluster!]! +} + +type SceneWidget { + id: ID! + pluginId: ID! + extensionId: ID! + propertyId: ID! + enabled: Boolean! + extended: Boolean! + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + property: Property @goField(forceResolver: true) +} + +type ScenePlugin { + pluginId: ID! + propertyId: ID + plugin: Plugin @goField(forceResolver: true) + property: Property @goField(forceResolver: true) +} + +type WidgetAlignSystem { + inner: WidgetZone + outer: WidgetZone +} + +type WidgetZone { + left: WidgetSection + center: WidgetSection + right: WidgetSection +} + +type WidgetSection { + top: WidgetArea + middle: WidgetArea + bottom: WidgetArea +} + +type WidgetArea { + widgetIds: [ID!]! + align: WidgetAreaAlign! +} + +# Property + +type PropertySchema { + id: ID! + groups: [PropertySchemaGroup!]! + linkableFields: PropertyLinkableFields! +} + +type PropertyLinkableFields { + schemaId: ID! + latlng: ID + url: ID + latlngField: PropertySchemaField @goField(forceResolver: true) + urlField: PropertySchemaField @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) +} + +type PropertySchemaGroup { + schemaGroupId: ID! + schemaId: ID! + fields: [PropertySchemaField!]! + isList: Boolean! + isAvailableIf: PropertyCondition + title: String + allTranslatedTitle: TranslatedString + representativeFieldId: ID + representativeField: PropertySchemaField + schema: PropertySchema @goField(forceResolver: true) + translatedTitle(lang: Lang): String! 
@goField(forceResolver: true) +} + +type PropertySchemaField { + fieldId: ID! + type: ValueType! + title: String! + description: String! + prefix: String + suffix: String + defaultValue: Any + ui: PropertySchemaFieldUI + min: Float + max: Float + choices: [PropertySchemaFieldChoice!] + isAvailableIf: PropertyCondition + allTranslatedTitle: TranslatedString + allTranslatedDescription: TranslatedString + translatedTitle(lang: Lang): String! @goField(forceResolver: true) + translatedDescription(lang: Lang): String! @goField(forceResolver: true) +} + +enum PropertySchemaFieldUI { + LAYER + MULTILINE + SELECTION + COLOR + RANGE + SLIDER + IMAGE + VIDEO + FILE + CAMERA_POSE + DATETIME +} + +type PropertySchemaFieldChoice { + key: String! + title: String! + icon: String + allTranslatedTitle: TranslatedString + translatedTitle(lang: Lang): String! @goField(forceResolver: true) +} + +type PropertyCondition { + fieldId: ID! + type: ValueType! + value: Any +} + +type Property implements Node { + id: ID! + schemaId: ID! + items: [PropertyItem!]! + schema: PropertySchema @goField(forceResolver: true) + layer: Layer @goField(forceResolver: true) + merged: MergedProperty @goField(forceResolver: true) +} + +union PropertyItem = PropertyGroup | PropertyGroupList + +type PropertyGroup { + id: ID! + schemaId: ID! + schemaGroupId: ID! + fields: [PropertyField!]! + schema: PropertySchema @goField(forceResolver: true) + schemaGroup: PropertySchemaGroup @goField(forceResolver: true) +} + +type PropertyGroupList { + id: ID! + schemaId: ID! + schemaGroupId: ID! + groups: [PropertyGroup!]! + schema: PropertySchema @goField(forceResolver: true) + schemaGroup: PropertySchemaGroup @goField(forceResolver: true) +} + +type PropertyField { + id: String! + parentId: ID! + schemaId: ID! + fieldId: ID! + links: [PropertyFieldLink!] + type: ValueType! 
+ value: Any + parent: Property @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + field: PropertySchemaField @goField(forceResolver: true) + actualValue: Any @goField(forceResolver: true) +} + +type PropertyFieldLink { + datasetId: ID + datasetSchemaId: ID! + datasetSchemaFieldId: ID! + dataset: Dataset @goField(forceResolver: true) + datasetField: DatasetField @goField(forceResolver: true) + datasetSchema: DatasetSchema @goField(forceResolver: true) + datasetSchemaField: DatasetSchemaField @goField(forceResolver: true) +} + +type MergedProperty { + originalId: ID + parentId: ID + # note: schemaId will not always be set + schemaId: ID + linkedDatasetId: ID + original: Property @goField(forceResolver: true) + parent: Property @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + groups: [MergedPropertyGroup!]! @goField(forceResolver: true) +} + +type MergedPropertyGroup { + originalPropertyId: ID + parentPropertyId: ID + originalId: ID + parentId: ID + schemaGroupId: ID! + # note: schemaId will not always be set + schemaId: ID + linkedDatasetId: ID + fields: [MergedPropertyField!]! + groups: [MergedPropertyGroup!]! + originalProperty: Property @goField(forceResolver: true) + parentProperty: Property @goField(forceResolver: true) + original: PropertyGroup @goField(forceResolver: true) + parent: PropertyGroup @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) +} + +type MergedPropertyField { + schemaId: ID! + fieldId: ID! + value: Any + type: ValueType! + links: [PropertyFieldLink!] + overridden: Boolean! + schema: PropertySchema @goField(forceResolver: true) + field: PropertySchemaField @goField(forceResolver: true) + actualValue: Any @goField(forceResolver: true) +} + +# Dataset + +type DatasetSchema implements Node { + id: ID! + source: String! 
+ name: String! + sceneId: ID! + fields: [DatasetSchemaField!]! + totalCount: Int! + representativeFieldId: ID + dynamic: Boolean + datasets( + first: Int + last: Int + after: Cursor + before: Cursor + ): DatasetConnection! @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + representativeField: DatasetSchemaField @goField(forceResolver: true) +} + +type DatasetSchemaField implements Node { + id: ID! + source: String! + name: String! + type: ValueType! + schemaId: ID! + refId: ID + schema: DatasetSchema @goField(forceResolver: true) + ref: DatasetSchema @goField(forceResolver: true) +} + +type Dataset implements Node { + id: ID! + source: String! + schemaId: ID! + fields: [DatasetField!]! + schema: DatasetSchema @goField(forceResolver: true) + name: String @goField(forceResolver: true) +} + +type DatasetField { + fieldId: ID! + schemaId: ID! + source: String! + type: ValueType! + value: Any + schema: DatasetSchema @goField(forceResolver: true) + field: DatasetSchemaField @goField(forceResolver: true) + valueRef: Dataset @goField(forceResolver: true) +} + +# Layer + +interface Layer { + id: ID! + sceneId: ID! + name: String! + isVisible: Boolean! + propertyId: ID + pluginId: ID + extensionId: ID + infobox: Infobox + # parentId will not be always set + parentId: ID + parent: LayerGroup + property: Property + plugin: Plugin + extension: PluginExtension + scenePlugin: ScenePlugin + tags: [LayerTag!]! +} + +enum LayerEncodingFormat { + KML + CZML + GEOJSON + SHAPE + REEARTH +} + +type LayerItem implements Layer { + id: ID! + sceneId: ID! + name: String! + isVisible: Boolean! + propertyId: ID + pluginId: ID + extensionId: ID + infobox: Infobox + # parentId will not be always set + parentId: ID + linkedDatasetId: ID + tags: [LayerTag!]! 
+ parent: LayerGroup @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedLayer @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + scenePlugin: ScenePlugin @goField(forceResolver: true) +} + +type LayerGroup implements Layer { + id: ID! + sceneId: ID! + name: String! + isVisible: Boolean! + propertyId: ID + pluginId: ID + extensionId: ID + infobox: Infobox + # parentId will not be always set + parentId: ID + linkedDatasetSchemaId: ID + root: Boolean! + layerIds: [ID!]! + tags: [LayerTag!]! + parent: LayerGroup @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDatasetSchema: DatasetSchema @goField(forceResolver: true) + layers: [Layer]! @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + scenePlugin: ScenePlugin @goField(forceResolver: true) +} + +type Infobox { + sceneId: ID! + layerId: ID! + propertyId: ID! + fields: [InfoboxField!]! + linkedDatasetId: ID + layer: Layer! @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedInfobox @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) +} + +type InfoboxField { + id: ID! + sceneId: ID! + layerId: ID! + propertyId: ID! + pluginId: ID! + extensionId: ID! + linkedDatasetId: ID + layer: Layer! @goField(forceResolver: true) + infobox: Infobox! 
@goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedInfoboxField @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + scenePlugin: ScenePlugin @goField(forceResolver: true) +} + +interface LayerTag { + tagId: ID! + tag: Tag +} + +type LayerTagItem implements LayerTag { + tagId: ID! + tag: Tag @goField(forceResolver: true) +} + +type LayerTagGroup implements LayerTag { + tagId: ID! + children: [LayerTagItem!]! + tag: Tag @goField(forceResolver: true) +} + +type MergedLayer { + originalId: ID! + parentId: ID + sceneID: ID! + property: MergedProperty + infobox: MergedInfobox + original: LayerItem @goField(forceResolver: true) + parent: LayerGroup @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) +} + +type MergedInfobox { + sceneID: ID! + property: MergedProperty + fields: [MergedInfoboxField!]! + scene: Scene @goField(forceResolver: true) +} + +type MergedInfoboxField { + originalId: ID! + sceneID: ID! + pluginId: ID! + extensionId: ID! + property: MergedProperty + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + scenePlugin: ScenePlugin @goField(forceResolver: true) +} + +interface Tag { + id: ID! + sceneId: ID! + label: String! + layers: [Layer!]! @goField(forceResolver: true) +} + +type TagItem implements Tag { + id: ID! + sceneId: ID! + label: String! + parentId: ID + linkedDatasetID: ID + linkedDatasetSchemaID: ID + linkedDatasetFieldID: ID + linkedDatasetSchema: DatasetSchema @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + linkedDatasetField: DatasetField @goField(forceResolver: true) + parent: TagGroup @goField(forceResolver: true) + layers: [Layer!]! 
@goField(forceResolver: true) +} + +type TagGroup implements Tag { + id: ID! + sceneId: ID! + label: String! + tagIds: [ID!] + tags: [TagItem!]! @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + layers: [Layer!]! @goField(forceResolver: true) +} + +type Cluster { + id: ID! + name: String! + propertyId: ID! + property: Property @goField(forceResolver: true) +} + +# InputType + +input CreateAssetInput { + teamId: ID! + file: Upload! +} + +input RemoveAssetInput { + assetId: ID! +} + +input SignupInput { + lang: Lang + theme: Theme + userId: ID + teamId: ID + secret: String +} + +input UpdateMeInput { + name: String + email: String + lang: Lang + theme: Theme + password: String + passwordConfirmation: String +} + +input RemoveMyAuthInput { + auth: String! +} + +input DeleteMeInput { + userId: ID! +} + +input CreateTeamInput { + name: String! +} + +input UpdateTeamInput { + teamId: ID! + name: String! +} + +input AddMemberToTeamInput { + teamId: ID! + userId: ID! + role: Role! +} + +input RemoveMemberFromTeamInput { + teamId: ID! + userId: ID! +} + +input UpdateMemberOfTeamInput { + teamId: ID! + userId: ID! + role: Role! +} + +input DeleteTeamInput { + teamId: ID! +} + +input CreateProjectInput { + teamId: ID! + visualizer: Visualizer! + name: String + description: String + imageUrl: URL + alias: String + archived: Boolean +} + +input UpdateProjectInput { + projectId: ID! + name: String + description: String + archived: Boolean + isBasicAuthActive: Boolean + basicAuthUsername: String + basicAuthPassword: String + alias: String + imageUrl: URL + publicTitle: String + publicDescription: String + publicImage: String + publicNoIndex: Boolean + deleteImageUrl: Boolean + deletePublicImage: Boolean +} + +input UploadPluginInput { + sceneId: ID! + file: Upload + url: URL +} + +input CreateSceneInput { + projectId: ID! +} + +input PublishProjectInput { + projectId: ID! + alias: String + status: PublishmentStatus! 
+} + +input DeleteProjectInput { + projectId: ID! +} + +input WidgetLocationInput { + zone: WidgetZoneType! + section: WidgetSectionType! + area: WidgetAreaType! +} + +input AddWidgetInput { + sceneId: ID! + pluginId: ID! + extensionId: ID! +} + +input UpdateWidgetInput { + sceneId: ID! + widgetId: ID! + enabled: Boolean + location: WidgetLocationInput + extended: Boolean + index: Int +} + +input UpdateWidgetAlignSystemInput { + sceneId: ID! + location: WidgetLocationInput! + align: WidgetAreaAlign +} + +input RemoveWidgetInput { + sceneId: ID! + widgetId: ID! +} + +input InstallPluginInput { + sceneId: ID! + pluginId: ID! +} + +input UninstallPluginInput { + sceneId: ID! + pluginId: ID! +} + +input UpgradePluginInput { + sceneId: ID! + pluginId: ID! + toPluginId: ID! +} + +input SyncDatasetInput { + sceneId: ID! + url: String! +} + +input UpdatePropertyValueInput { + propertyId: ID! + schemaGroupId: ID + itemId: ID + fieldId: ID! + value: Any + type: ValueType! +} + +input RemovePropertyFieldInput { + propertyId: ID! + schemaGroupId: ID + itemId: ID + fieldId: ID! +} + +input UploadFileToPropertyInput { + propertyId: ID! + schemaGroupId: ID + itemId: ID + fieldId: ID! + file: Upload! +} + +input LinkDatasetToPropertyValueInput { + propertyId: ID! + schemaGroupId: ID + itemId: ID + fieldId: ID! + datasetSchemaIds: [ID!]! + datasetSchemaFieldIds: [ID!]! + datasetIds: [ID!] +} + +input UnlinkPropertyValueInput { + propertyId: ID! + schemaGroupId: ID + itemId: ID + fieldId: ID! +} + +input AddPropertyItemInput { + propertyId: ID! + schemaGroupId: ID! + index: Int + nameFieldValue: Any + nameFieldType: ValueType +} + +input MovePropertyItemInput { + propertyId: ID! + schemaGroupId: ID! + itemId: ID! + index: Int! +} + +input RemovePropertyItemInput { + propertyId: ID! + schemaGroupId: ID! + itemId: ID! +} + +input UpdatePropertyItemInput { + propertyId: ID! + schemaGroupId: ID! + operations: [UpdatePropertyItemOperationInput!]! 
+} + +input UpdatePropertyItemOperationInput { + operation: ListOperation! + itemId: ID + index: Int + nameFieldValue: Any + nameFieldType: ValueType +} + +input AddLayerItemInput { + parentLayerId: ID! + pluginId: ID! + extensionId: ID! + index: Int + name: String + lat: Float + lng: Float +} + +input AddLayerGroupInput { + parentLayerId: ID! + pluginId: ID + extensionId: ID + index: Int + linkedDatasetSchemaID: ID + name: String + representativeFieldId: ID +} + +input RemoveLayerInput { + layerId: ID! +} + +input UpdateLayerInput { + layerId: ID! + name: String + visible: Boolean +} + +input MoveLayerInput { + layerId: ID! + destLayerId: ID + index: Int +} + +input CreateInfoboxInput { + layerId: ID! +} + +input RemoveInfoboxInput { + layerId: ID! +} + +input AddInfoboxFieldInput { + layerId: ID! + pluginId: ID! + extensionId: ID! + index: Int +} + +input MoveInfoboxFieldInput { + layerId: ID! + infoboxFieldId: ID! + index: Int! +} + +input RemoveInfoboxFieldInput { + layerId: ID! + infoboxFieldId: ID! +} + +input UpdateDatasetSchemaInput { + schemaId: ID! + name: String! +} + +input AddDynamicDatasetSchemaInput { + sceneId: ID! +} + +input AddDynamicDatasetInput { + datasetSchemaId: ID! + author: String! + content: String! + lat: Float + lng: Float + target: String +} + +input RemoveDatasetSchemaInput { + schemaId: ID! + force: Boolean +} + +input ImportLayerInput { + layerId: ID! + file: Upload! + format: LayerEncodingFormat! +} + +input ImportDatasetInput { + file: Upload! + sceneId: ID! + datasetSchemaId: ID +} + +input ImportDatasetFromGoogleSheetInput { + accessToken: String! + fileId: String! + sheetName: String! + sceneId: ID! + datasetSchemaId: ID +} + +input AddDatasetSchemaInput { + sceneId: ID! + name: String! + representativefield: ID +} + +input CreateTagItemInput { + sceneId: ID! + label: String! + parent: ID + linkedDatasetSchemaID: ID + linkedDatasetID: ID + linkedDatasetField: ID +} + +input CreateTagGroupInput { + sceneId: ID! + label: String! 
+ tags: [ID!] +} + +input UpdateTagInput { + tagId: ID! + sceneId: ID! + label: String +} + +input AttachTagItemToGroupInput { + itemID: ID! + groupID: ID! +} + +input DetachTagItemFromGroupInput { + itemID: ID! + groupID: ID! +} + +input AttachTagToLayerInput { + tagID: ID! + layerID: ID! +} + +input DetachTagFromLayerInput { + tagID: ID! + layerID: ID! +} + +input RemoveTagInput { + tagID: ID! +} + +input AddClusterInput { + sceneId: ID! + name: String! +} + +input UpdateClusterInput { + clusterId: ID! + sceneId: ID! + name: String + propertyId: ID +} + +input RemoveClusterInput { + clusterId: ID! + sceneId: ID! +} + +# Payload + +type CreateAssetPayload { + asset: Asset! +} + +type RemoveAssetPayload { + assetId: ID! +} + +type UpdateMePayload { + me: Me! +} + +type SignupPayload { + user: User! + team: Team! +} + +type DeleteMePayload { + userId: ID! +} + +type CreateTeamPayload { + team: Team! +} + +type UpdateTeamPayload { + team: Team! +} + +type AddMemberToTeamPayload { + team: Team! +} + +type RemoveMemberFromTeamPayload { + team: Team! +} + +type UpdateMemberOfTeamPayload { + team: Team! +} + +type DeleteTeamPayload { + teamId: ID! +} + +type ProjectPayload { + project: Project! +} + +type DeleteProjectPayload { + projectId: ID! +} + +type UploadPluginPayload { + plugin: Plugin! + scene: Scene! + scenePlugin: ScenePlugin! +} + +type CreateScenePayload { + scene: Scene! +} + +type AddWidgetPayload { + scene: Scene! + sceneWidget: SceneWidget! +} + +type UpdateWidgetPayload { + scene: Scene! + sceneWidget: SceneWidget! +} + +type UpdateWidgetAlignSystemPayload { + scene: Scene! +} + +type RemoveWidgetPayload { + scene: Scene! + widgetId: ID! +} + +type InstallPluginPayload { + scene: Scene! + scenePlugin: ScenePlugin! +} + +type UninstallPluginPayload { + pluginId: ID! + scene: Scene! +} + +type UpgradePluginPayload { + scene: Scene! + scenePlugin: ScenePlugin! +} + +type SyncDatasetPayload { + sceneId: ID! + url: String! + datasetSchema: [DatasetSchema!]! 
+ dataset: [Dataset!]! +} + +type PropertyFieldPayload { + property: Property! + propertyField: PropertyField +} + +type PropertyItemPayload { + property: Property! + propertyItem: PropertyItem +} + +type AddLayerItemPayload { + layer: LayerItem! + parentLayer: LayerGroup! + index: Int +} + +type AddLayerGroupPayload { + layer: LayerGroup! + parentLayer: LayerGroup! + index: Int +} + +type RemoveLayerPayload { + layerId: ID! + parentLayer: LayerGroup! +} + +type UpdateLayerPayload { + layer: Layer! +} + +type MoveLayerPayload { + layerId: ID! + fromParentLayer: LayerGroup! + toParentLayer: LayerGroup! + index: Int! +} + +type CreateInfoboxPayload { + layer: Layer! +} + +type RemoveInfoboxPayload { + layer: Layer! +} + +type AddInfoboxFieldPayload { + infoboxField: InfoboxField! + layer: Layer! +} + +type MoveInfoboxFieldPayload { + infoboxFieldId: ID! + layer: Layer! + index: Int! +} + +type RemoveInfoboxFieldPayload { + infoboxFieldId: ID! + layer: Layer! +} + +type UpdateDatasetSchemaPayload { + datasetSchema: DatasetSchema +} + +type RemoveDatasetSchemaPayload { + schemaId: ID! +} + +type AddDynamicDatasetSchemaPayload { + datasetSchema: DatasetSchema +} + +type AddDynamicDatasetPayload { + datasetSchema: DatasetSchema + dataset: Dataset +} + +type ImportLayerPayload { + layers: [Layer!]! + parentLayer: LayerGroup! +} + +type ImportDatasetPayload { + datasetSchema: DatasetSchema! +} + +type AddDatasetSchemaPayload { + datasetSchema: DatasetSchema +} + +type CreateTagItemPayload { + tag: TagItem! + parent: TagGroup +} + +type CreateTagGroupPayload { + tag: TagGroup! +} + +type AttachTagItemToGroupPayload { + tag: TagGroup! +} + +type DetachTagItemFromGroupPayload { + tag: TagGroup! +} + +type UpdateTagPayload { + tag: Tag! +} + +type AttachTagToLayerPayload { + layer: Layer! +} + +type DetachTagFromLayerPayload { + layer: Layer! +} + +type RemoveTagPayload { + tagId: ID! + updatedLayers: [Layer!]! +} + +type AddClusterPayload { + scene: Scene! + cluster: Cluster! 
+} + +type UpdateClusterPayload { + scene: Scene! + cluster: Cluster! +} + +type RemoveClusterPayload { + scene: Scene! + clusterId: ID! +} + +# Connection + +type AssetConnection { + edges: [AssetEdge!]! + nodes: [Asset]! + pageInfo: PageInfo! + totalCount: Int! +} + +type AssetEdge { + cursor: Cursor! + node: Asset +} + +type ProjectConnection { + edges: [ProjectEdge!]! + nodes: [Project]! + pageInfo: PageInfo! + totalCount: Int! +} + +type ProjectEdge { + cursor: Cursor! + node: Project +} + +type DatasetSchemaConnection { + edges: [DatasetSchemaEdge!]! + nodes: [DatasetSchema]! + pageInfo: PageInfo! + totalCount: Int! +} + +type DatasetSchemaEdge { + cursor: Cursor! + node: DatasetSchema +} + +type DatasetConnection { + edges: [DatasetEdge!]! + nodes: [Dataset]! + pageInfo: PageInfo! + totalCount: Int! +} + +type DatasetEdge { + cursor: Cursor! + node: Dataset +} + +# Query + +type Query { + me: Me + node(id: ID!, type: NodeType!): Node + nodes(id: [ID!]!, type: NodeType!): [Node]! + propertySchema(id: ID!): PropertySchema + propertySchemas(id: [ID!]!): [PropertySchema!]! + plugin(id: ID!): Plugin + plugins(id: [ID!]!): [Plugin!]! + layer(id: ID!): Layer + scene(projectId: ID!): Scene + assets( + teamId: ID! + keyword: String + sort: AssetSortType + pagination: Pagination + ): AssetConnection! + projects( + teamId: ID! + includeArchived: Boolean + first: Int + last: Int + after: Cursor + before: Cursor + ): ProjectConnection! + datasetSchemas( + sceneId: ID! + first: Int + last: Int + after: Cursor + before: Cursor + ): DatasetSchemaConnection! + datasets( + datasetSchemaId: ID! + first: Int + last: Int + after: Cursor + before: Cursor + ): DatasetConnection! + dynamicDatasetSchemas(sceneId: ID!): [DatasetSchema!]! + searchUser(nameOrEmail: String!): User + checkProjectAlias(alias: String!): ProjectAliasAvailability! 
+} + +# Mutation + +type Mutation { + # Asset + createAsset(input: CreateAssetInput!): CreateAssetPayload + removeAsset(input: RemoveAssetInput!): RemoveAssetPayload + + # User + signup(input: SignupInput!): SignupPayload + updateMe(input: UpdateMeInput!): UpdateMePayload + removeMyAuth(input: RemoveMyAuthInput!): UpdateMePayload + deleteMe(input: DeleteMeInput!): DeleteMePayload + + # Team + createTeam(input: CreateTeamInput!): CreateTeamPayload + deleteTeam(input: DeleteTeamInput!): DeleteTeamPayload + updateTeam(input: UpdateTeamInput!): UpdateTeamPayload + addMemberToTeam(input: AddMemberToTeamInput!): AddMemberToTeamPayload + removeMemberFromTeam( + input: RemoveMemberFromTeamInput! + ): RemoveMemberFromTeamPayload + updateMemberOfTeam(input: UpdateMemberOfTeamInput!): UpdateMemberOfTeamPayload + + # Project + createProject(input: CreateProjectInput!): ProjectPayload + updateProject(input: UpdateProjectInput!): ProjectPayload + publishProject(input: PublishProjectInput!): ProjectPayload + deleteProject(input: DeleteProjectInput!): DeleteProjectPayload + + # Scene + createScene(input: CreateSceneInput!): CreateScenePayload + addWidget(input: AddWidgetInput!): AddWidgetPayload + updateWidget(input: UpdateWidgetInput!): UpdateWidgetPayload + updateWidgetAlignSystem( + input: UpdateWidgetAlignSystemInput! + ): UpdateWidgetAlignSystemPayload + removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload + installPlugin(input: InstallPluginInput!): InstallPluginPayload + uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload + uploadPlugin(input: UploadPluginInput!): UploadPluginPayload + upgradePlugin(input: UpgradePluginInput!): UpgradePluginPayload + addCluster(input: AddClusterInput!): AddClusterPayload + updateCluster(input: UpdateClusterInput!): UpdateClusterPayload + removeCluster(input: RemoveClusterInput!): RemoveClusterPayload + + # Dataset + updateDatasetSchema( + input: UpdateDatasetSchemaInput! 
+ ): UpdateDatasetSchemaPayload + syncDataset(input: SyncDatasetInput!): SyncDatasetPayload + addDynamicDatasetSchema( + input: AddDynamicDatasetSchemaInput! + ): AddDynamicDatasetSchemaPayload + addDynamicDataset(input: AddDynamicDatasetInput!): AddDynamicDatasetPayload + removeDatasetSchema( + input: RemoveDatasetSchemaInput! + ): RemoveDatasetSchemaPayload + importDataset(input: ImportDatasetInput!): ImportDatasetPayload + importDatasetFromGoogleSheet( + input: ImportDatasetFromGoogleSheetInput! + ): ImportDatasetPayload + addDatasetSchema(input: AddDatasetSchemaInput!): AddDatasetSchemaPayload + + # Property + updatePropertyValue(input: UpdatePropertyValueInput!): PropertyFieldPayload + removePropertyField(input: RemovePropertyFieldInput!): PropertyFieldPayload + uploadFileToProperty(input: UploadFileToPropertyInput!): PropertyFieldPayload + linkDatasetToPropertyValue( + input: LinkDatasetToPropertyValueInput! + ): PropertyFieldPayload + unlinkPropertyValue(input: UnlinkPropertyValueInput!): PropertyFieldPayload + addPropertyItem(input: AddPropertyItemInput!): PropertyItemPayload + movePropertyItem(input: MovePropertyItemInput!): PropertyItemPayload + removePropertyItem(input: RemovePropertyItemInput!): PropertyItemPayload + updatePropertyItems(input: UpdatePropertyItemInput!): PropertyItemPayload + + # Layer + addLayerItem(input: AddLayerItemInput!): AddLayerItemPayload + addLayerGroup(input: AddLayerGroupInput!): AddLayerGroupPayload + removeLayer(input: RemoveLayerInput!): RemoveLayerPayload + updateLayer(input: UpdateLayerInput!): UpdateLayerPayload + moveLayer(input: MoveLayerInput!): MoveLayerPayload + createInfobox(input: CreateInfoboxInput!): CreateInfoboxPayload + removeInfobox(input: RemoveInfoboxInput!): RemoveInfoboxPayload + addInfoboxField(input: AddInfoboxFieldInput!): AddInfoboxFieldPayload + moveInfoboxField(input: MoveInfoboxFieldInput!): MoveInfoboxFieldPayload + removeInfoboxField(input: RemoveInfoboxFieldInput!): RemoveInfoboxFieldPayload + 
importLayer(input: ImportLayerInput!): ImportLayerPayload + attachTagToLayer(input: AttachTagToLayerInput!): AttachTagToLayerPayload + detachTagFromLayer(input: DetachTagFromLayerInput!): DetachTagFromLayerPayload + + # Tag + createTagItem(input: CreateTagItemInput!): CreateTagItemPayload + createTagGroup(input: CreateTagGroupInput!): CreateTagGroupPayload + attachTagItemToGroup( + input: AttachTagItemToGroupInput! + ): AttachTagItemToGroupPayload + detachTagItemFromGroup( + input: DetachTagItemFromGroupInput! + ): DetachTagItemFromGroupPayload + updateTag(input: UpdateTagInput!): UpdateTagPayload + removeTag(input: RemoveTagInput!): RemoveTagPayload +} + +schema { + query: Query + mutation: Mutation +} +`, BuiltIn: false}, +} +var parsedSchema = gqlparser.MustLoadSchema(sources...) + +// endregion ************************** generated!.gotpl ************************** + +// region ***************************** args.gotpl ***************************** + +func (ec *executionContext) field_DatasetSchema_datasets_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *int + if tmp, ok := rawArgs["first"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + arg0, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["first"] = arg0 + var arg1 *int + if tmp, ok := rawArgs["last"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + arg1, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["last"] = arg1 + var arg2 *usecase.Cursor + if tmp, ok := rawArgs["after"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + arg2, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["after"] = arg2 + var arg3 
*usecase.Cursor + if tmp, ok := rawArgs["before"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + arg3, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["before"] = arg3 + return args, nil +} + +func (ec *executionContext) field_Mutation_addCluster_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.AddClusterInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddClusterInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddClusterInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addDatasetSchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.AddDatasetSchemaInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDatasetSchemaInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addDynamicDatasetSchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.AddDynamicDatasetSchemaInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = 
ec.unmarshalNAddDynamicDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetSchemaInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addDynamicDataset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.AddDynamicDatasetInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddDynamicDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addInfoboxField_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.AddInfoboxFieldInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddInfoboxFieldInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addLayerGroup_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.AddLayerGroupInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddLayerGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerGroupInput(ctx, tmp) + if err 
!= nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addLayerItem_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.AddLayerItemInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddLayerItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerItemInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addMemberToTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.AddMemberToTeamInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddMemberToTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddMemberToTeamInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addPropertyItem_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.AddPropertyItemInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddPropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddPropertyItemInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addWidget_args(ctx context.Context, rawArgs 
map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.AddWidgetInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddWidgetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_attachTagItemToGroup_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.AttachTagItemToGroupInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAttachTagItemToGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagItemToGroupInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_attachTagToLayer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.AttachTagToLayerInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAttachTagToLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagToLayerInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_createAsset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.CreateAssetInput + if tmp, ok := 
rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNCreateAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateAssetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_createInfobox_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.CreateInfoboxInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNCreateInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateInfoboxInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_createProject_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.CreateProjectInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNCreateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateProjectInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_createScene_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.CreateSceneInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = 
ec.unmarshalNCreateSceneInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateSceneInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_createTagGroup_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.CreateTagGroupInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNCreateTagGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagGroupInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_createTagItem_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.CreateTagItemInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNCreateTagItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagItemInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_createTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.CreateTeamInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNCreateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTeamInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = 
arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_deleteMe_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.DeleteMeInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNDeleteMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteMeInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_deleteProject_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.DeleteProjectInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNDeleteProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteProjectInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_deleteTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.DeleteTeamInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNDeleteTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteTeamInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_detachTagFromLayer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := 
map[string]interface{}{} + var arg0 gqlmodel.DetachTagFromLayerInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNDetachTagFromLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagFromLayerInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_detachTagItemFromGroup_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.DetachTagItemFromGroupInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNDetachTagItemFromGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagItemFromGroupInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_importDatasetFromGoogleSheet_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.ImportDatasetFromGoogleSheetInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNImportDatasetFromGoogleSheetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetFromGoogleSheetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_importDataset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.ImportDatasetInput + if tmp, ok := 
rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNImportDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_importLayer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.ImportLayerInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNImportLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportLayerInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_installPlugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.InstallPluginInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNInstallPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInstallPluginInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_linkDatasetToPropertyValue_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.LinkDatasetToPropertyValueInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = 
ec.unmarshalNLinkDatasetToPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLinkDatasetToPropertyValueInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_moveInfoboxField_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.MoveInfoboxFieldInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNMoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveInfoboxFieldInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_moveLayer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.MoveLayerInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNMoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveLayerInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_movePropertyItem_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.MovePropertyItemInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNMovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMovePropertyItemInput(ctx, tmp) + if err != nil { 
+ return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_publishProject_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.PublishProjectInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNPublishProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPublishProjectInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeAsset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.RemoveAssetInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveAssetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeCluster_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.RemoveClusterInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveClusterInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveClusterInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeDatasetSchema_args(ctx context.Context, rawArgs map[string]interface{}) 
(map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.RemoveDatasetSchemaInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveDatasetSchemaInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeInfoboxField_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.RemoveInfoboxFieldInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxFieldInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeInfobox_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.RemoveInfoboxInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeLayer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.RemoveLayerInput + if tmp, ok := rawArgs["input"]; ok { 
+ ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveLayerInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeMemberFromTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.RemoveMemberFromTeamInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveMemberFromTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveMemberFromTeamInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeMyAuth_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.RemoveMyAuthInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveMyAuthInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveMyAuthInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removePropertyField_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.RemovePropertyFieldInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = 
ec.unmarshalNRemovePropertyFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemovePropertyFieldInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removePropertyItem_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.RemovePropertyItemInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemovePropertyItemInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeTag_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.RemoveTagInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveTagInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveTagInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeWidget_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.RemoveWidgetInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveWidgetInput(ctx, tmp) + if err != nil { + return nil, err + } 
+ } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_signup_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.SignupInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNSignupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSignupInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_syncDataset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.SyncDatasetInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNSyncDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSyncDatasetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_uninstallPlugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UninstallPluginInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUninstallPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUninstallPluginInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_unlinkPropertyValue_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + 
args := map[string]interface{}{} + var arg0 gqlmodel.UnlinkPropertyValueInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUnlinkPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUnlinkPropertyValueInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateCluster_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UpdateClusterInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateClusterInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateClusterInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateDatasetSchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UpdateDatasetSchemaInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateDatasetSchemaInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateLayer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UpdateLayerInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateLayerInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateMe_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UpdateMeInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMeInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateMemberOfTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UpdateMemberOfTeamInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateMemberOfTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMemberOfTeamInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateProject_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UpdateProjectInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateProjectInput(ctx, tmp) + if 
err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updatePropertyItems_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UpdatePropertyItemInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdatePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyItemInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updatePropertyValue_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UpdatePropertyValueInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdatePropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyValueInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateTag_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UpdateTagInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateTagInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTagInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateTeam_args(ctx context.Context, 
rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UpdateTeamInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTeamInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateWidgetAlignSystem_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UpdateWidgetAlignSystemInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateWidgetAlignSystemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetAlignSystemInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateWidget_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UpdateWidgetInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_upgradePlugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UpgradePluginInput + if tmp, ok 
:= rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpgradePluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpgradePluginInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_uploadFileToProperty_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UploadFileToPropertyInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUploadFileToPropertyInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUploadFileToPropertyInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_uploadPlugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UploadPluginInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUploadPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUploadPluginInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_PluginExtension_sceneWidget_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.ID + if tmp, ok := rawArgs["sceneId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + arg0, err = 
ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["sceneId"] = arg0 + return args, nil +} + +func (ec *executionContext) field_PluginExtension_translatedDescription_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *language.Tag + if tmp, ok := rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_PluginExtension_translatedName_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *language.Tag + if tmp, ok := rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Plugin_scenePlugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *gqlmodel.ID + if tmp, ok := rawArgs["sceneId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + arg0, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["sceneId"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Plugin_translatedDescription_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := 
map[string]interface{}{} + var arg0 *language.Tag + if tmp, ok := rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Plugin_translatedName_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *language.Tag + if tmp, ok := rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_PropertySchemaFieldChoice_translatedTitle_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *language.Tag + if tmp, ok := rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_PropertySchemaField_translatedDescription_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *language.Tag + if tmp, ok := rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_PropertySchemaField_translatedTitle_args(ctx 
context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *language.Tag + if tmp, ok := rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_PropertySchemaGroup_translatedTitle_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *language.Tag + if tmp, ok := rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query___type_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 string + if tmp, ok := rawArgs["name"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + arg0, err = ec.unmarshalNString2string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["name"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_assets_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.ID + if tmp, ok := rawArgs["teamId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["teamId"] = arg0 + var arg1 *string + if tmp, ok := 
rawArgs["keyword"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("keyword")) + arg1, err = ec.unmarshalOString2แš–string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["keyword"] = arg1 + var arg2 *gqlmodel.AssetSortType + if tmp, ok := rawArgs["sort"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sort")) + arg2, err = ec.unmarshalOAssetSortType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetSortType(ctx, tmp) + if err != nil { + return nil, err + } + } + args["sort"] = arg2 + var arg3 *gqlmodel.Pagination + if tmp, ok := rawArgs["pagination"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pagination")) + arg3, err = ec.unmarshalOPagination2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPagination(ctx, tmp) + if err != nil { + return nil, err + } + } + args["pagination"] = arg3 + return args, nil +} + +func (ec *executionContext) field_Query_checkProjectAlias_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 string + if tmp, ok := rawArgs["alias"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("alias")) + arg0, err = ec.unmarshalNString2string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["alias"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_datasetSchemas_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.ID + if tmp, ok := rawArgs["sceneId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["sceneId"] = arg0 + var arg1 
*int + if tmp, ok := rawArgs["first"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + arg1, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["first"] = arg1 + var arg2 *int + if tmp, ok := rawArgs["last"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + arg2, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["last"] = arg2 + var arg3 *usecase.Cursor + if tmp, ok := rawArgs["after"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + arg3, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["after"] = arg3 + var arg4 *usecase.Cursor + if tmp, ok := rawArgs["before"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + arg4, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["before"] = arg4 + return args, nil +} + +func (ec *executionContext) field_Query_datasets_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.ID + if tmp, ok := rawArgs["datasetSchemaId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaId")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["datasetSchemaId"] = arg0 + var arg1 *int + if tmp, ok := rawArgs["first"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + arg1, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["first"] = arg1 + var arg2 *int + if tmp, ok := rawArgs["last"]; ok { 
+ ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + arg2, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["last"] = arg2 + var arg3 *usecase.Cursor + if tmp, ok := rawArgs["after"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + arg3, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["after"] = arg3 + var arg4 *usecase.Cursor + if tmp, ok := rawArgs["before"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + arg4, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["before"] = arg4 + return args, nil +} + +func (ec *executionContext) field_Query_dynamicDatasetSchemas_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.ID + if tmp, ok := rawArgs["sceneId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["sceneId"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_layer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.ID + if tmp, ok := rawArgs["id"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["id"] = arg0 + return args, nil +} + +func (ec *executionContext) 
field_Query_node_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.ID + if tmp, ok := rawArgs["id"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["id"] = arg0 + var arg1 gqlmodel.NodeType + if tmp, ok := rawArgs["type"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("type")) + arg1, err = ec.unmarshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNodeType(ctx, tmp) + if err != nil { + return nil, err + } + } + args["type"] = arg1 + return args, nil +} + +func (ec *executionContext) field_Query_nodes_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 []gqlmodel.ID + if tmp, ok := rawArgs["id"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) + arg0, err = ec.unmarshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, tmp) + if err != nil { + return nil, err + } + } + args["id"] = arg0 + var arg1 gqlmodel.NodeType + if tmp, ok := rawArgs["type"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("type")) + arg1, err = ec.unmarshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNodeType(ctx, tmp) + if err != nil { + return nil, err + } + } + args["type"] = arg1 + return args, nil +} + +func (ec *executionContext) field_Query_plugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.ID + if tmp, ok := rawArgs["id"]; ok { + ctx := 
graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["id"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_plugins_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 []gqlmodel.ID + if tmp, ok := rawArgs["id"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) + arg0, err = ec.unmarshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, tmp) + if err != nil { + return nil, err + } + } + args["id"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_projects_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.ID + if tmp, ok := rawArgs["teamId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["teamId"] = arg0 + var arg1 *bool + if tmp, ok := rawArgs["includeArchived"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("includeArchived")) + arg1, err = ec.unmarshalOBoolean2แš–bool(ctx, tmp) + if err != nil { + return nil, err + } + } + args["includeArchived"] = arg1 + var arg2 *int + if tmp, ok := rawArgs["first"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + arg2, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["first"] = arg2 + var arg3 *int + if tmp, ok := rawArgs["last"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + arg3, 
err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["last"] = arg3 + var arg4 *usecase.Cursor + if tmp, ok := rawArgs["after"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + arg4, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["after"] = arg4 + var arg5 *usecase.Cursor + if tmp, ok := rawArgs["before"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + arg5, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["before"] = arg5 + return args, nil +} + +func (ec *executionContext) field_Query_propertySchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.ID + if tmp, ok := rawArgs["id"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["id"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_propertySchemas_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 []gqlmodel.ID + if tmp, ok := rawArgs["id"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) + arg0, err = ec.unmarshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, tmp) + if err != nil { + return nil, err + } + } + args["id"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_scene_args(ctx context.Context, rawArgs map[string]interface{}) 
(map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.ID + if tmp, ok := rawArgs["projectId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["projectId"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_searchUser_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 string + if tmp, ok := rawArgs["nameOrEmail"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("nameOrEmail")) + arg0, err = ec.unmarshalNString2string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["nameOrEmail"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Scene_datasetSchemas_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *int + if tmp, ok := rawArgs["first"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + arg0, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["first"] = arg0 + var arg1 *int + if tmp, ok := rawArgs["last"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + arg1, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["last"] = arg1 + var arg2 *usecase.Cursor + if tmp, ok := rawArgs["after"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + arg2, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["after"] = arg2 + var arg3 *usecase.Cursor + if tmp, ok := 
rawArgs["before"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + arg3, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["before"] = arg3 + return args, nil +} + +func (ec *executionContext) field_Team_assets_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *int + if tmp, ok := rawArgs["first"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + arg0, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["first"] = arg0 + var arg1 *int + if tmp, ok := rawArgs["last"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + arg1, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["last"] = arg1 + var arg2 *usecase.Cursor + if tmp, ok := rawArgs["after"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + arg2, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["after"] = arg2 + var arg3 *usecase.Cursor + if tmp, ok := rawArgs["before"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + arg3, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["before"] = arg3 + return args, nil +} + +func (ec *executionContext) field_Team_projects_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *bool + if tmp, ok := rawArgs["includeArchived"]; ok { + ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("includeArchived")) + arg0, err = ec.unmarshalOBoolean2แš–bool(ctx, tmp) + if err != nil { + return nil, err + } + } + args["includeArchived"] = arg0 + var arg1 *int + if tmp, ok := rawArgs["first"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + arg1, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["first"] = arg1 + var arg2 *int + if tmp, ok := rawArgs["last"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + arg2, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["last"] = arg2 + var arg3 *usecase.Cursor + if tmp, ok := rawArgs["after"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + arg3, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["after"] = arg3 + var arg4 *usecase.Cursor + if tmp, ok := rawArgs["before"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + arg4, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["before"] = arg4 + return args, nil +} + +func (ec *executionContext) field___Type_enumValues_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 bool + if tmp, ok := rawArgs["includeDeprecated"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("includeDeprecated")) + arg0, err = ec.unmarshalOBoolean2bool(ctx, tmp) + if err != nil { + return nil, err + } + } + args["includeDeprecated"] = arg0 + return args, nil +} + +func (ec *executionContext) field___Type_fields_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + 
args := map[string]interface{}{} + var arg0 bool + if tmp, ok := rawArgs["includeDeprecated"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("includeDeprecated")) + arg0, err = ec.unmarshalOBoolean2bool(ctx, tmp) + if err != nil { + return nil, err + } + } + args["includeDeprecated"] = arg0 + return args, nil +} + +// endregion ***************************** args.gotpl ***************************** + +// region ************************** directives.gotpl ************************** + +// endregion ************************** directives.gotpl ************************** + +// region **************************** field.gotpl ***************************** + +func (ec *executionContext) _AddClusterPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddClusterPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddClusterPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddClusterPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddClusterPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, 
field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AddClusterPayload_cluster(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddClusterPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddClusterPayload_cluster(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Cluster, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Cluster) + fc.Result = res + return ec.marshalNCluster2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCluster(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddClusterPayload_cluster(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddClusterPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Cluster_id(ctx, field) + case "name": + return ec.fieldContext_Cluster_name(ctx, field) + case "propertyId": + return ec.fieldContext_Cluster_propertyId(ctx, field) + case "property": + return ec.fieldContext_Cluster_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Cluster", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AddDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDatasetSchemaPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddDatasetSchemaPayload_datasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from 
middleware stack in children + return obj.DatasetSchema, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddDatasetSchemaPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AddDynamicDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDynamicDatasetPayload) (ret 
graphql.Marshaler) { + fc, err := ec.fieldContext_AddDynamicDatasetPayload_datasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DatasetSchema, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddDynamicDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddDynamicDatasetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return 
ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AddDynamicDatasetPayload_dataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDynamicDatasetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddDynamicDatasetPayload_dataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Dataset, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddDynamicDatasetPayload_dataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddDynamicDatasetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case 
"name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AddDynamicDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDynamicDatasetSchemaPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddDynamicDatasetSchemaPayload_datasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DatasetSchema, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddDynamicDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddDynamicDatasetSchemaPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case 
"totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AddInfoboxFieldPayload_infoboxField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddInfoboxFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddInfoboxFieldPayload_infoboxField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.InfoboxField, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.InfoboxField) + fc.Result = res + return ec.marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfoboxField(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddInfoboxFieldPayload_infoboxField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddInfoboxFieldPayload", + Field: field, + IsMethod: false, + IsResolver: false, + 
Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_InfoboxField_id(ctx, field) + case "sceneId": + return ec.fieldContext_InfoboxField_sceneId(ctx, field) + case "layerId": + return ec.fieldContext_InfoboxField_layerId(ctx, field) + case "propertyId": + return ec.fieldContext_InfoboxField_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_InfoboxField_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_InfoboxField_extensionId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_InfoboxField_linkedDatasetId(ctx, field) + case "layer": + return ec.fieldContext_InfoboxField_layer(ctx, field) + case "infobox": + return ec.fieldContext_InfoboxField_infobox(ctx, field) + case "property": + return ec.fieldContext_InfoboxField_property(ctx, field) + case "plugin": + return ec.fieldContext_InfoboxField_plugin(ctx, field) + case "extension": + return ec.fieldContext_InfoboxField_extension(ctx, field) + case "linkedDataset": + return ec.fieldContext_InfoboxField_linkedDataset(ctx, field) + case "merged": + return ec.fieldContext_InfoboxField_merged(ctx, field) + case "scene": + return ec.fieldContext_InfoboxField_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_InfoboxField_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type InfoboxField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AddInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddInfoboxFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddInfoboxFieldPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddInfoboxFieldPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _AddLayerGroupPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerGroupPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddLayerGroupPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.LayerGroup) + fc.Result = res + return 
ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddLayerGroupPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddLayerGroupPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, 
field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AddLayerGroupPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerGroupPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddLayerGroupPayload_parentLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentLayer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.LayerGroup) + fc.Result = res + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddLayerGroupPayload_parentLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddLayerGroupPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return 
ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AddLayerGroupPayload_index(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerGroupPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddLayerGroupPayload_index(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Index, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*int) + fc.Result = res + return ec.marshalOInt2แš–int(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddLayerGroupPayload_index(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddLayerGroupPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _AddLayerItemPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerItemPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddLayerItemPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.LayerItem) + fc.Result = res + return ec.marshalNLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerItem(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddLayerItemPayload_layer(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddLayerItemPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerItem_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerItem_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerItem_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerItem_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerItem_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerItem_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerItem_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerItem_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerItem_parentId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_LayerItem_linkedDatasetId(ctx, field) + case "tags": + return ec.fieldContext_LayerItem_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerItem_parent(ctx, field) + case "property": + return ec.fieldContext_LayerItem_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerItem_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerItem_extension(ctx, field) + case "linkedDataset": + return ec.fieldContext_LayerItem_linkedDataset(ctx, field) + case "merged": + return ec.fieldContext_LayerItem_merged(ctx, field) + case "scene": + return ec.fieldContext_LayerItem_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerItem_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerItem", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AddLayerItemPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerItemPayload) (ret graphql.Marshaler) { + fc, 
err := ec.fieldContext_AddLayerItemPayload_parentLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentLayer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.LayerGroup) + fc.Result = res + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddLayerItemPayload_parentLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddLayerItemPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return 
ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AddLayerItemPayload_index(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerItemPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddLayerItemPayload_index(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Index, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*int) + fc.Result = res + return ec.marshalOInt2แš–int(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddLayerItemPayload_index(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddLayerItemPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _AddMemberToTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddMemberToTeamPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddMemberToTeamPayload_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Team, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Team) + fc.Result = res + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddMemberToTeamPayload_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddMemberToTeamPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return 
ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AddWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddWidgetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddWidgetPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddWidgetPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddWidgetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return 
ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AddWidgetPayload_sceneWidget(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddWidgetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddWidgetPayload_sceneWidget(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneWidget, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + 
return graphql.Null + } + res := resTmp.(*gqlmodel.SceneWidget) + fc.Result = res + return ec.marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidget(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AddWidgetPayload_sceneWidget(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddWidgetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_SceneWidget_id(ctx, field) + case "pluginId": + return ec.fieldContext_SceneWidget_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_SceneWidget_extensionId(ctx, field) + case "propertyId": + return ec.fieldContext_SceneWidget_propertyId(ctx, field) + case "enabled": + return ec.fieldContext_SceneWidget_enabled(ctx, field) + case "extended": + return ec.fieldContext_SceneWidget_extended(ctx, field) + case "plugin": + return ec.fieldContext_SceneWidget_plugin(ctx, field) + case "extension": + return ec.fieldContext_SceneWidget_extension(ctx, field) + case "property": + return ec.fieldContext_SceneWidget_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type SceneWidget", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Asset_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Asset_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in 
children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Asset_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Asset", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Asset_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Asset_createdAt(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.CreatedAt, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(time.Time) + fc.Result = res + return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Asset_createdAt(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Asset", + Field: field, + IsMethod: 
false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type DateTime does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Asset_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Asset_teamId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TeamID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Asset_teamId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Asset", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Asset_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Asset_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, 
ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Asset_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Asset", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Asset_size(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Asset_size(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Size, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int64) + fc.Result = res + return ec.marshalNFileSize2int64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Asset_size(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Asset", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type FileSize does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Asset_url(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Asset_url(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.URL, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Asset_url(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Asset", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Asset_contentType(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Asset_contentType(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := 
recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ContentType, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Asset_contentType(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Asset", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Asset_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Asset_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Asset().Team(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Team) + fc.Result = res + return ec.marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) 
fieldContext_Asset_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Asset", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AssetConnection_edges(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AssetConnection_edges(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Edges, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.AssetEdge) + fc.Result = res + return ec.marshalNAssetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetEdgeแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AssetConnection_edges(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AssetConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "cursor": + return ec.fieldContext_AssetEdge_cursor(ctx, field) + case "node": + return ec.fieldContext_AssetEdge_node(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AssetEdge", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AssetConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AssetConnection_nodes(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Nodes, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.Asset) + fc.Result = res + return ec.marshalNAsset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AssetConnection_nodes(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AssetConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Asset_id(ctx, field) + case 
"createdAt": + return ec.fieldContext_Asset_createdAt(ctx, field) + case "teamId": + return ec.fieldContext_Asset_teamId(ctx, field) + case "name": + return ec.fieldContext_Asset_name(ctx, field) + case "size": + return ec.fieldContext_Asset_size(ctx, field) + case "url": + return ec.fieldContext_Asset_url(ctx, field) + case "contentType": + return ec.fieldContext_Asset_contentType(ctx, field) + case "team": + return ec.fieldContext_Asset_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Asset", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AssetConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AssetConnection_pageInfo(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PageInfo, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.PageInfo) + fc.Result = res + return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AssetConnection_pageInfo(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AssetConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + 
case "startCursor": + return ec.fieldContext_PageInfo_startCursor(ctx, field) + case "endCursor": + return ec.fieldContext_PageInfo_endCursor(ctx, field) + case "hasNextPage": + return ec.fieldContext_PageInfo_hasNextPage(ctx, field) + case "hasPreviousPage": + return ec.fieldContext_PageInfo_hasPreviousPage(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PageInfo", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AssetConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AssetConnection_totalCount(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TotalCount, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int) + fc.Result = res + return ec.marshalNInt2int(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AssetConnection_totalCount(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AssetConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _AssetEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetEdge) (ret graphql.Marshaler) { + fc, err 
:= ec.fieldContext_AssetEdge_cursor(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Cursor, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(usecase.Cursor) + fc.Result = res + return ec.marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AssetEdge_cursor(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AssetEdge", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Cursor does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _AssetEdge_node(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AssetEdge_node(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Node, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := 
resTmp.(*gqlmodel.Asset) + fc.Result = res + return ec.marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AssetEdge_node(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AssetEdge", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Asset_id(ctx, field) + case "createdAt": + return ec.fieldContext_Asset_createdAt(ctx, field) + case "teamId": + return ec.fieldContext_Asset_teamId(ctx, field) + case "name": + return ec.fieldContext_Asset_name(ctx, field) + case "size": + return ec.fieldContext_Asset_size(ctx, field) + case "url": + return ec.fieldContext_Asset_url(ctx, field) + case "contentType": + return ec.fieldContext_Asset_contentType(ctx, field) + case "team": + return ec.fieldContext_Asset_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Asset", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AttachTagItemToGroupPayload_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AttachTagItemToGroupPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AttachTagItemToGroupPayload_tag(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Tag, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, 
"must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.TagGroup) + fc.Result = res + return ec.marshalNTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AttachTagItemToGroupPayload_tag(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AttachTagItemToGroupPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_TagGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_TagGroup_sceneId(ctx, field) + case "label": + return ec.fieldContext_TagGroup_label(ctx, field) + case "tagIds": + return ec.fieldContext_TagGroup_tagIds(ctx, field) + case "tags": + return ec.fieldContext_TagGroup_tags(ctx, field) + case "scene": + return ec.fieldContext_TagGroup_scene(ctx, field) + case "layers": + return ec.fieldContext_TagGroup_layers(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TagGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _AttachTagToLayerPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AttachTagToLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AttachTagToLayerPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if 
!graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_AttachTagToLayerPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AttachTagToLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _Camera_lat(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Camera_lat(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Lat, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Camera_lat(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Camera", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Camera_lng(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Camera_lng(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Lng, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Camera_lng(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Camera", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Camera_altitude(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Camera_altitude(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context 
from middleware stack in children + return obj.Altitude, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Camera_altitude(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Camera", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Camera_heading(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Camera_heading(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Heading, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Camera_heading(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Camera", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx 
context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Camera_pitch(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Camera_pitch(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Pitch, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Camera_pitch(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Camera", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Camera_roll(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Camera_roll(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) 
(interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Roll, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Camera_roll(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Camera", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Camera_fov(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Camera_fov(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fov, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Camera_fov(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Camera", + Field: field, + IsMethod: false, + IsResolver: 
false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Cluster_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Cluster_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Cluster_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Cluster", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Cluster_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Cluster_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Cluster_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Cluster", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Cluster_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Cluster_propertyId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) 
fieldContext_Cluster_propertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Cluster", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Cluster_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Cluster_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Cluster().Property(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Cluster_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Cluster", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return 
ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _CreateAssetPayload_asset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateAssetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_CreateAssetPayload_asset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Asset, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Asset) + fc.Result = res + return ec.marshalNAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_CreateAssetPayload_asset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "CreateAssetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Asset_id(ctx, field) + case "createdAt": + return ec.fieldContext_Asset_createdAt(ctx, field) + case "teamId": + return ec.fieldContext_Asset_teamId(ctx, field) + case "name": + return ec.fieldContext_Asset_name(ctx, field) + case "size": + 
return ec.fieldContext_Asset_size(ctx, field) + case "url": + return ec.fieldContext_Asset_url(ctx, field) + case "contentType": + return ec.fieldContext_Asset_contentType(ctx, field) + case "team": + return ec.fieldContext_Asset_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Asset", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _CreateInfoboxPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateInfoboxPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_CreateInfoboxPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_CreateInfoboxPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "CreateInfoboxPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _CreateScenePayload_scene(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.CreateScenePayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_CreateScenePayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_CreateScenePayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "CreateScenePayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return 
ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _CreateTagGroupPayload_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateTagGroupPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_CreateTagGroupPayload_tag(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Tag, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.TagGroup) + fc.Result = res + return ec.marshalNTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_CreateTagGroupPayload_tag(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "CreateTagGroupPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_TagGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_TagGroup_sceneId(ctx, field) + case "label": + return ec.fieldContext_TagGroup_label(ctx, field) + case "tagIds": + return ec.fieldContext_TagGroup_tagIds(ctx, field) + case "tags": + return ec.fieldContext_TagGroup_tags(ctx, field) + case "scene": + return ec.fieldContext_TagGroup_scene(ctx, field) + case "layers": + return ec.fieldContext_TagGroup_layers(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TagGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _CreateTagItemPayload_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateTagItemPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_CreateTagItemPayload_tag(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Tag, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.TagItem) + fc.Result = res + return ec.marshalNTagItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagItem(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_CreateTagItemPayload_tag(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "CreateTagItemPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_TagItem_id(ctx, field) + case "sceneId": + return ec.fieldContext_TagItem_sceneId(ctx, field) + case "label": + return ec.fieldContext_TagItem_label(ctx, field) + case "parentId": + return ec.fieldContext_TagItem_parentId(ctx, field) + case "linkedDatasetID": + return ec.fieldContext_TagItem_linkedDatasetID(ctx, field) + case "linkedDatasetSchemaID": + return ec.fieldContext_TagItem_linkedDatasetSchemaID(ctx, field) + case "linkedDatasetFieldID": + return ec.fieldContext_TagItem_linkedDatasetFieldID(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_TagItem_linkedDatasetSchema(ctx, field) + case "linkedDataset": + return ec.fieldContext_TagItem_linkedDataset(ctx, field) + case "linkedDatasetField": + return ec.fieldContext_TagItem_linkedDatasetField(ctx, field) + case "parent": + return ec.fieldContext_TagItem_parent(ctx, field) + case "layers": + return ec.fieldContext_TagItem_layers(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TagItem", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _CreateTagItemPayload_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateTagItemPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_CreateTagItemPayload_parent(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
obj.Parent, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.TagGroup) + fc.Result = res + return ec.marshalOTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_CreateTagItemPayload_parent(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "CreateTagItemPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_TagGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_TagGroup_sceneId(ctx, field) + case "label": + return ec.fieldContext_TagGroup_label(ctx, field) + case "tagIds": + return ec.fieldContext_TagGroup_tagIds(ctx, field) + case "tags": + return ec.fieldContext_TagGroup_tags(ctx, field) + case "scene": + return ec.fieldContext_TagGroup_scene(ctx, field) + case "layers": + return ec.fieldContext_TagGroup_layers(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TagGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _CreateTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateTeamPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_CreateTeamPayload_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Team, nil + }) + if err != nil { + ec.Error(ctx, err) + 
return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Team) + fc.Result = res + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_CreateTeamPayload_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "CreateTeamPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Dataset_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Dataset_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } 
+ return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Dataset_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Dataset", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Dataset_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Dataset_source(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Source, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Dataset_source(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Dataset", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + 
return fc, nil +} + +func (ec *executionContext) _Dataset_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Dataset_schemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Dataset_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Dataset", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Dataset_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Dataset_fields(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware 
stack in children + return obj.Fields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.DatasetField) + fc.Result = res + return ec.marshalNDatasetField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetFieldแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Dataset_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Dataset", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_DatasetField_fieldId(ctx, field) + case "schemaId": + return ec.fieldContext_DatasetField_schemaId(ctx, field) + case "source": + return ec.fieldContext_DatasetField_source(ctx, field) + case "type": + return ec.fieldContext_DatasetField_type(ctx, field) + case "value": + return ec.fieldContext_DatasetField_value(ctx, field) + case "schema": + return ec.fieldContext_DatasetField_schema(ctx, field) + case "field": + return ec.fieldContext_DatasetField_field(ctx, field) + case "valueRef": + return ec.fieldContext_DatasetField_valueRef(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Dataset_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Dataset_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Dataset().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Dataset_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Dataset", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Dataset_name(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Dataset_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Dataset().Name(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Dataset_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Dataset", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetConnection_edges(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetConnection_edges(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Edges, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + 
} + return graphql.Null + } + res := resTmp.([]*gqlmodel.DatasetEdge) + fc.Result = res + return ec.marshalNDatasetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetEdgeแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetConnection_edges(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "cursor": + return ec.fieldContext_DatasetEdge_cursor(ctx, field) + case "node": + return ec.fieldContext_DatasetEdge_node(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetEdge", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetConnection_nodes(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Nodes, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.Dataset) + fc.Result = res + return ec.marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) 
fieldContext_DatasetConnection_nodes(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetConnection_pageInfo(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PageInfo, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.PageInfo) + fc.Result = res + return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetConnection_pageInfo(ctx 
context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "startCursor": + return ec.fieldContext_PageInfo_startCursor(ctx, field) + case "endCursor": + return ec.fieldContext_PageInfo_endCursor(ctx, field) + case "hasNextPage": + return ec.fieldContext_PageInfo_hasNextPage(ctx, field) + case "hasPreviousPage": + return ec.fieldContext_PageInfo_hasPreviousPage(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PageInfo", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetConnection_totalCount(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TotalCount, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int) + fc.Result = res + return ec.marshalNInt2int(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetConnection_totalCount(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx 
context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetEdge_cursor(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Cursor, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(usecase.Cursor) + fc.Result = res + return ec.marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetEdge_cursor(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetEdge", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Cursor does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetEdge_node(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetEdge_node(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, 
ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Node, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetEdge_node(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetEdge", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetField_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetField_fieldId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use 
context from middleware stack in children + return obj.FieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetField_fieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetField_schemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetField_schemaId(ctx context.Context, 
field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetField_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetField_source(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Source, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetField_source(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetField_type(ctx, field) + if err != nil { + return 
graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ValueType) + fc.Result = res + return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetField_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ValueType does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetField_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetField_value(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Value, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(interface{}) + fc.Result = res + 
return ec.marshalOAny2interface(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetField_value(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Any does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetField_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetField_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetField().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetField_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return 
ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetField_field(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetField_field(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetField().Field(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchemaField) + fc.Result = res + return ec.marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaField(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetField_field(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchemaField_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchemaField_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchemaField_name(ctx, field) + case "type": + return ec.fieldContext_DatasetSchemaField_type(ctx, field) + case "schemaId": + return ec.fieldContext_DatasetSchemaField_schemaId(ctx, field) + case "refId": + return ec.fieldContext_DatasetSchemaField_refId(ctx, field) + case "schema": + return ec.fieldContext_DatasetSchemaField_schema(ctx, field) + case "ref": + return ec.fieldContext_DatasetSchemaField_ref(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchemaField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetField_valueRef(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetField_valueRef(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetField().ValueRef(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, 
res) +} + +func (ec *executionContext) fieldContext_DatasetField_valueRef(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchema_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchema_id(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchema_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_source(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Source, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchema_source(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchema_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_name(ctx, field) + if err != nil { + 
return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchema_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchema_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_sceneId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return 
ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchema_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchema_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_fields(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.DatasetSchemaField) + fc.Result = res + return ec.marshalNDatasetSchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaFieldแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchema_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchemaField_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchemaField_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchemaField_name(ctx, field) + case "type": + return ec.fieldContext_DatasetSchemaField_type(ctx, field) + case "schemaId": + return ec.fieldContext_DatasetSchemaField_schemaId(ctx, field) + case "refId": + return ec.fieldContext_DatasetSchemaField_refId(ctx, field) + case "schema": + return ec.fieldContext_DatasetSchemaField_schema(ctx, field) + case "ref": + return ec.fieldContext_DatasetSchemaField_ref(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchemaField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchema_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_totalCount(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetSchema().TotalCount(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int) + fc.Result = res + return ec.marshalNInt2int(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchema_totalCount(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + IsMethod: true, 
+ IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchema_representativeFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.RepresentativeFieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchema_representativeFieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchema_dynamic(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_dynamic(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := 
recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Dynamic, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*bool) + fc.Result = res + return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchema_dynamic(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchema_datasets(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_datasets(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetSchema().Datasets(rctx, obj, fc.Args["first"].(*int), fc.Args["last"].(*int), fc.Args["after"].(*usecase.Cursor), fc.Args["before"].(*usecase.Cursor)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetConnection) + fc.Result = res + 
return ec.marshalNDatasetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchema_datasets(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "edges": + return ec.fieldContext_DatasetConnection_edges(ctx, field) + case "nodes": + return ec.fieldContext_DatasetConnection_nodes(ctx, field) + case "pageInfo": + return ec.fieldContext_DatasetConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetConnection", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_DatasetSchema_datasets_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchema_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetSchema().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return 
graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchema_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return 
ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchema_representativeField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_representativeField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetSchema().RepresentativeField(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchemaField) + fc.Result = res + return ec.marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaField(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchema_representativeField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchemaField_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchemaField_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchemaField_name(ctx, field) + case "type": + return ec.fieldContext_DatasetSchemaField_type(ctx, field) + case "schemaId": + return ec.fieldContext_DatasetSchemaField_schemaId(ctx, field) + case "refId": + 
return ec.fieldContext_DatasetSchemaField_refId(ctx, field) + case "schema": + return ec.fieldContext_DatasetSchemaField_schema(ctx, field) + case "ref": + return ec.fieldContext_DatasetSchemaField_ref(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchemaField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchemaConnection_edges(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaConnection_edges(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Edges, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.DatasetSchemaEdge) + fc.Result = res + return ec.marshalNDatasetSchemaEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaEdgeแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchemaConnection_edges(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "cursor": + return ec.fieldContext_DatasetSchemaEdge_cursor(ctx, field) + case "node": + return ec.fieldContext_DatasetSchemaEdge_node(ctx, field) + } + return nil, 
fmt.Errorf("no field named %q was found under type DatasetSchemaEdge", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchemaConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaConnection_nodes(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Nodes, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchemaConnection_nodes(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return 
ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchemaConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaConnection_pageInfo(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PageInfo, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.PageInfo) + fc.Result = res + return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchemaConnection_pageInfo(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "startCursor": + return ec.fieldContext_PageInfo_startCursor(ctx, field) + case "endCursor": + return ec.fieldContext_PageInfo_endCursor(ctx, field) + case "hasNextPage": + return ec.fieldContext_PageInfo_hasNextPage(ctx, field) + case "hasPreviousPage": + return ec.fieldContext_PageInfo_hasPreviousPage(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PageInfo", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchemaConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaConnection_totalCount(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TotalCount, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int) + fc.Result = res + return ec.marshalNInt2int(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchemaConnection_totalCount(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) 
_DatasetSchemaEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaEdge_cursor(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Cursor, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(usecase.Cursor) + fc.Result = res + return ec.marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchemaEdge_cursor(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaEdge", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Cursor does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchemaEdge_node(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaEdge_node(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context 
from middleware stack in children + return obj.Node, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchemaEdge_node(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaEdge", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchemaField_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_DatasetSchemaField_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchemaField_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchemaField_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaField_source(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Source, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + 
if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchemaField_source(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchemaField_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaField_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchemaField_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return 
nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchemaField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaField_type(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ValueType) + fc.Result = res + return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchemaField_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ValueType does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchemaField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaField_schemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, 
ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchemaField_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchemaField_refId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaField_refId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.RefID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, 
field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchemaField_refId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchemaField_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaField_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetSchemaField().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchemaField_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return 
ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _DatasetSchemaField_ref(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaField_ref(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetSchemaField().Ref(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DatasetSchemaField_ref(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _DeleteMePayload_userId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DeleteMePayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DeleteMePayload_userId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.UserID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if 
resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DeleteMePayload_userId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DeleteMePayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DeleteProjectPayload_projectId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DeleteProjectPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DeleteProjectPayload_projectId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ProjectID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DeleteProjectPayload_projectId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + 
Object: "DeleteProjectPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DeleteTeamPayload_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DeleteTeamPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DeleteTeamPayload_teamId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TeamID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DeleteTeamPayload_teamId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DeleteTeamPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _DetachTagFromLayerPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DetachTagFromLayerPayload) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_DetachTagFromLayerPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DetachTagFromLayerPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DetachTagFromLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _DetachTagItemFromGroupPayload_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DetachTagItemFromGroupPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DetachTagItemFromGroupPayload_tag(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Tag, nil + }) + if err != 
nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.TagGroup) + fc.Result = res + return ec.marshalNTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_DetachTagItemFromGroupPayload_tag(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DetachTagItemFromGroupPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_TagGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_TagGroup_sceneId(ctx, field) + case "label": + return ec.fieldContext_TagGroup_label(ctx, field) + case "tagIds": + return ec.fieldContext_TagGroup_tagIds(ctx, field) + case "tags": + return ec.fieldContext_TagGroup_tags(ctx, field) + case "scene": + return ec.fieldContext_TagGroup_scene(ctx, field) + case "layers": + return ec.fieldContext_TagGroup_layers(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TagGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _ImportDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ImportDatasetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ImportDatasetPayload_datasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from 
middleware stack in children + return obj.DatasetSchema, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalNDatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ImportDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ImportDatasetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _ImportLayerPayload_layers(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.ImportLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ImportLayerPayload_layers(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layers, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ImportLayerPayload_layers(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ImportLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _ImportLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ImportLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ImportLayerPayload_parentLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from 
middleware stack in children + return obj.ParentLayer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.LayerGroup) + fc.Result = res + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ImportLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ImportLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return 
ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Infobox_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_sceneId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Infobox_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Infobox", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, 
errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Infobox_layerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_layerId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LayerID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Infobox_layerId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Infobox", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Infobox_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_propertyId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Infobox_propertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Infobox", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Infobox_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_fields(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.InfoboxField) + fc.Result = res + return ec.marshalNInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfoboxFieldแš„(ctx, field.Selections, res) +} + +func 
(ec *executionContext) fieldContext_Infobox_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Infobox", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_InfoboxField_id(ctx, field) + case "sceneId": + return ec.fieldContext_InfoboxField_sceneId(ctx, field) + case "layerId": + return ec.fieldContext_InfoboxField_layerId(ctx, field) + case "propertyId": + return ec.fieldContext_InfoboxField_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_InfoboxField_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_InfoboxField_extensionId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_InfoboxField_linkedDatasetId(ctx, field) + case "layer": + return ec.fieldContext_InfoboxField_layer(ctx, field) + case "infobox": + return ec.fieldContext_InfoboxField_infobox(ctx, field) + case "property": + return ec.fieldContext_InfoboxField_property(ctx, field) + case "plugin": + return ec.fieldContext_InfoboxField_plugin(ctx, field) + case "extension": + return ec.fieldContext_InfoboxField_extension(ctx, field) + case "linkedDataset": + return ec.fieldContext_InfoboxField_linkedDataset(ctx, field) + case "merged": + return ec.fieldContext_InfoboxField_merged(ctx, field) + case "scene": + return ec.fieldContext_InfoboxField_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_InfoboxField_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type InfoboxField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Infobox_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_linkedDatasetId(ctx, field) + if err != nil { + return 
graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LinkedDatasetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Infobox_linkedDatasetId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Infobox", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Infobox_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Infobox().Layer(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Layer) + fc.Result = res + 
return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Infobox_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Infobox", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _Infobox_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Infobox().Property(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Infobox_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Infobox", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case 
"schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Infobox_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_linkedDataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Infobox().LinkedDataset(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Infobox_linkedDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Infobox", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, 
field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Infobox_merged(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_merged(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Infobox().Merged(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.MergedInfobox) + fc.Result = res + return ec.marshalOMergedInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfobox(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Infobox_merged(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Infobox", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "sceneID": + return ec.fieldContext_MergedInfobox_sceneID(ctx, field) + case "property": + return ec.fieldContext_MergedInfobox_property(ctx, field) + case "fields": + return ec.fieldContext_MergedInfobox_fields(ctx, field) + case "scene": + return ec.fieldContext_MergedInfobox_scene(ctx, field) + } + return nil, fmt.Errorf("no 
field named %q was found under type MergedInfobox", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Infobox_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Infobox().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Infobox_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Infobox", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return 
ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _InfoboxField_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InfoboxField_id(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _InfoboxField_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_sceneId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InfoboxField_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _InfoboxField_layerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_InfoboxField_layerId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LayerID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InfoboxField_layerId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _InfoboxField_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_propertyId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if 
!graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InfoboxField_propertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _InfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_pluginId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InfoboxField_pluginId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: false, 
+ IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _InfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_extensionId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ExtensionID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InfoboxField_extensionId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _InfoboxField_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_linkedDatasetId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = 
graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LinkedDatasetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InfoboxField_linkedDatasetId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _InfoboxField_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().Layer(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Layer) + fc.Result = res 
+ return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InfoboxField_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _InfoboxField_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_infobox(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().Infobox(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Infobox) + fc.Result = res + return ec.marshalNInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfobox(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InfoboxField_infobox(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + switch field.Name { + case "sceneId": + return ec.fieldContext_Infobox_sceneId(ctx, field) + case "layerId": + return ec.fieldContext_Infobox_layerId(ctx, field) + case "propertyId": + return ec.fieldContext_Infobox_propertyId(ctx, field) + case "fields": + return ec.fieldContext_Infobox_fields(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_Infobox_linkedDatasetId(ctx, field) + case "layer": + return ec.fieldContext_Infobox_layer(ctx, field) + case "property": + return ec.fieldContext_Infobox_property(ctx, field) + case "linkedDataset": + return ec.fieldContext_Infobox_linkedDataset(ctx, field) + case "merged": + return ec.fieldContext_Infobox_merged(ctx, field) + case "scene": + return ec.fieldContext_Infobox_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Infobox", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _InfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().Property(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InfoboxField_property(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _InfoboxField_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_plugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().Plugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Plugin) + fc.Result = res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InfoboxField_plugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + 
IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _InfoboxField_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_extension(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().Extension(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PluginExtension) + fc.Result = res + return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InfoboxField_extension(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "extensionId": + return ec.fieldContext_PluginExtension_extensionId(ctx, field) + case "pluginId": + return ec.fieldContext_PluginExtension_pluginId(ctx, field) + case "type": + return ec.fieldContext_PluginExtension_type(ctx, field) + case "name": + return ec.fieldContext_PluginExtension_name(ctx, field) + case "description": + return ec.fieldContext_PluginExtension_description(ctx, field) + case "icon": + return ec.fieldContext_PluginExtension_icon(ctx, field) + case "singleOnly": + return ec.fieldContext_PluginExtension_singleOnly(ctx, field) + case "widgetLayout": + return ec.fieldContext_PluginExtension_widgetLayout(ctx, field) + case "visualizer": + return ec.fieldContext_PluginExtension_visualizer(ctx, field) + case "propertySchemaId": + return ec.fieldContext_PluginExtension_propertySchemaId(ctx, field) + case "allTranslatedName": + return ec.fieldContext_PluginExtension_allTranslatedName(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PluginExtension_allTranslatedDescription(ctx, field) + case "plugin": + return 
ec.fieldContext_PluginExtension_plugin(ctx, field) + case "sceneWidget": + return ec.fieldContext_PluginExtension_sceneWidget(ctx, field) + case "propertySchema": + return ec.fieldContext_PluginExtension_propertySchema(ctx, field) + case "translatedName": + return ec.fieldContext_PluginExtension_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_PluginExtension_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PluginExtension", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _InfoboxField_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_linkedDataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().LinkedDataset(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InfoboxField_linkedDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case 
"source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _InfoboxField_merged(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_merged(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().Merged(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.MergedInfoboxField) + fc.Result = res + return ec.marshalOMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxField(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InfoboxField_merged(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalId": + return ec.fieldContext_MergedInfoboxField_originalId(ctx, field) + case "sceneID": + return ec.fieldContext_MergedInfoboxField_sceneID(ctx, 
field) + case "pluginId": + return ec.fieldContext_MergedInfoboxField_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_MergedInfoboxField_extensionId(ctx, field) + case "property": + return ec.fieldContext_MergedInfoboxField_property(ctx, field) + case "plugin": + return ec.fieldContext_MergedInfoboxField_plugin(ctx, field) + case "extension": + return ec.fieldContext_MergedInfoboxField_extension(ctx, field) + case "scene": + return ec.fieldContext_MergedInfoboxField_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_MergedInfoboxField_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedInfoboxField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _InfoboxField_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InfoboxField_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _InfoboxField_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_scenePlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + 
} + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().ScenePlugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ScenePlugin) + fc.Result = res + return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InfoboxField_scenePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _InstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InstallPluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InstallPluginPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, 
nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InstallPluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case 
"tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _InstallPluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InstallPluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InstallPluginPayload_scenePlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ScenePlugin, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.ScenePlugin) + fc.Result = res + return ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_InstallPluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InstallPluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, 
field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LatLng_lat(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLng) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LatLng_lat(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Lat, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LatLng_lat(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LatLng", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LatLng_lng(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLng) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LatLng_lng(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Lng, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LatLng_lng(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LatLng", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LatLngHeight_lat(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLngHeight) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LatLngHeight_lat(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Lat, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LatLngHeight_lat(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LatLngHeight", + Field: 
field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LatLngHeight_lng(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLngHeight) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LatLngHeight_lng(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Lng, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LatLngHeight_lng(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LatLngHeight", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LatLngHeight_height(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLngHeight) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LatLngHeight_height(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, 
r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Height, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LatLngHeight_height(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LatLngHeight", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) 
fieldContext_LayerGroup_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_sceneId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) 
(ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_isVisible(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_isVisible(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsVisible, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be 
null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_isVisible(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_propertyId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_propertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + 
+func (ec *executionContext) _LayerGroup_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_pluginId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_pluginId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_extensionId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ExtensionID, nil + }) + if err != 
nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_extensionId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_infobox(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Infobox, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Infobox) + fc.Result = res + return ec.marshalOInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfobox(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_infobox(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "sceneId": + return ec.fieldContext_Infobox_sceneId(ctx, field) + case "layerId": + return ec.fieldContext_Infobox_layerId(ctx, field) + case "propertyId": + return ec.fieldContext_Infobox_propertyId(ctx, field) + case "fields": + return ec.fieldContext_Infobox_fields(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_Infobox_linkedDatasetId(ctx, field) + case "layer": + return ec.fieldContext_Infobox_layer(ctx, field) + case "property": + return ec.fieldContext_Infobox_property(ctx, field) + case "linkedDataset": + return ec.fieldContext_Infobox_linkedDataset(ctx, field) + case "merged": + return ec.fieldContext_Infobox_merged(ctx, field) + case "scene": + return ec.fieldContext_Infobox_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Infobox", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_parentId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_parentId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + 
fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_linkedDatasetSchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LinkedDatasetSchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_linkedDatasetSchemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_root(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_root(ctx, field) + if err != nil { + return graphql.Null + } + ctx = 
graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Root, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_root(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_layerIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_layerIds(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LayerIds, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.ID) + fc.Result = res + return 
ec.marshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_layerIds(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_tags(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Tags, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.LayerTag) + fc.Result = res + return ec.marshalNLayerTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_tags(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, 
errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_parent(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().Parent(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.LayerGroup) + fc.Result = res + return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_parent(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return 
ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().Property(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Property) + fc.Result = res + return 
ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_plugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().Plugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Plugin) + fc.Result = res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, 
res) +} + +func (ec *executionContext) fieldContext_LayerGroup_plugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_extension(ctx, field) + if err != 
nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().Extension(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PluginExtension) + fc.Result = res + return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_extension(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "extensionId": + return ec.fieldContext_PluginExtension_extensionId(ctx, field) + case "pluginId": + return ec.fieldContext_PluginExtension_pluginId(ctx, field) + case "type": + return ec.fieldContext_PluginExtension_type(ctx, field) + case "name": + return ec.fieldContext_PluginExtension_name(ctx, field) + case "description": + return ec.fieldContext_PluginExtension_description(ctx, field) + case "icon": + return ec.fieldContext_PluginExtension_icon(ctx, field) + case "singleOnly": + return ec.fieldContext_PluginExtension_singleOnly(ctx, field) + case "widgetLayout": + return ec.fieldContext_PluginExtension_widgetLayout(ctx, field) + case "visualizer": + return ec.fieldContext_PluginExtension_visualizer(ctx, field) + case "propertySchemaId": + return ec.fieldContext_PluginExtension_propertySchemaId(ctx, field) + case "allTranslatedName": + 
return ec.fieldContext_PluginExtension_allTranslatedName(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PluginExtension_allTranslatedDescription(ctx, field) + case "plugin": + return ec.fieldContext_PluginExtension_plugin(ctx, field) + case "sceneWidget": + return ec.fieldContext_PluginExtension_sceneWidget(ctx, field) + case "propertySchema": + return ec.fieldContext_PluginExtension_propertySchema(ctx, field) + case "translatedName": + return ec.fieldContext_PluginExtension_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_PluginExtension_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PluginExtension", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_linkedDatasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().LinkedDatasetSchema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_linkedDatasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: 
field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_layers(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_layers(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().Layers(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res 
:= resTmp.([]gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_layers(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + 
return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerGroup_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) 
{ + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().ScenePlugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ScenePlugin) + fc.Result = res + return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerGroup_scenePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + 
ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_sceneId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_isVisible(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_isVisible(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) 
(interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsVisible, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_isVisible(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_propertyId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_propertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: 
"LayerItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_pluginId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_pluginId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_extensionId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + 
ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ExtensionID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_extensionId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_infobox(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Infobox, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Infobox) + fc.Result = res + return ec.marshalOInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfobox(ctx, field.Selections, res) +} + +func (ec *executionContext) 
fieldContext_LayerItem_infobox(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "sceneId": + return ec.fieldContext_Infobox_sceneId(ctx, field) + case "layerId": + return ec.fieldContext_Infobox_layerId(ctx, field) + case "propertyId": + return ec.fieldContext_Infobox_propertyId(ctx, field) + case "fields": + return ec.fieldContext_Infobox_fields(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_Infobox_linkedDatasetId(ctx, field) + case "layer": + return ec.fieldContext_Infobox_layer(ctx, field) + case "property": + return ec.fieldContext_Infobox_property(ctx, field) + case "linkedDataset": + return ec.fieldContext_Infobox_linkedDataset(ctx, field) + case "merged": + return ec.fieldContext_Infobox_merged(ctx, field) + case "scene": + return ec.fieldContext_Infobox_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Infobox", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_parentId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return 
ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_parentId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_linkedDatasetId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LinkedDatasetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_linkedDatasetId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + 
+func (ec *executionContext) _LayerItem_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_tags(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Tags, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.LayerTag) + fc.Result = res + return ec.marshalNLayerTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_tags(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_parent(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context 
from middleware stack in children + return ec.resolvers.LayerItem().Parent(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.LayerGroup) + fc.Result = res + return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_parent(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, 
field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerItem().Property(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) 
+ case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_plugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerItem().Plugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Plugin) + fc.Result = res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_plugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case 
"description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_extension(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerItem().Extension(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PluginExtension) + fc.Result = res + return 
ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_extension(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "extensionId": + return ec.fieldContext_PluginExtension_extensionId(ctx, field) + case "pluginId": + return ec.fieldContext_PluginExtension_pluginId(ctx, field) + case "type": + return ec.fieldContext_PluginExtension_type(ctx, field) + case "name": + return ec.fieldContext_PluginExtension_name(ctx, field) + case "description": + return ec.fieldContext_PluginExtension_description(ctx, field) + case "icon": + return ec.fieldContext_PluginExtension_icon(ctx, field) + case "singleOnly": + return ec.fieldContext_PluginExtension_singleOnly(ctx, field) + case "widgetLayout": + return ec.fieldContext_PluginExtension_widgetLayout(ctx, field) + case "visualizer": + return ec.fieldContext_PluginExtension_visualizer(ctx, field) + case "propertySchemaId": + return ec.fieldContext_PluginExtension_propertySchemaId(ctx, field) + case "allTranslatedName": + return ec.fieldContext_PluginExtension_allTranslatedName(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PluginExtension_allTranslatedDescription(ctx, field) + case "plugin": + return ec.fieldContext_PluginExtension_plugin(ctx, field) + case "sceneWidget": + return ec.fieldContext_PluginExtension_sceneWidget(ctx, field) + case "propertySchema": + return ec.fieldContext_PluginExtension_propertySchema(ctx, field) + case "translatedName": + return ec.fieldContext_PluginExtension_translatedName(ctx, field) + case "translatedDescription": + return 
ec.fieldContext_PluginExtension_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PluginExtension", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_linkedDataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerItem().LinkedDataset(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_linkedDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q 
was found under type Dataset", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_merged(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_merged(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerItem().Merged(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.MergedLayer) + fc.Result = res + return ec.marshalOMergedLayer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_merged(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalId": + return ec.fieldContext_MergedLayer_originalId(ctx, field) + case "parentId": + return ec.fieldContext_MergedLayer_parentId(ctx, field) + case "sceneID": + return ec.fieldContext_MergedLayer_sceneID(ctx, field) + case "property": + return ec.fieldContext_MergedLayer_property(ctx, field) + case "infobox": + return ec.fieldContext_MergedLayer_infobox(ctx, field) + case "original": + return ec.fieldContext_MergedLayer_original(ctx, field) + case "parent": + return ec.fieldContext_MergedLayer_parent(ctx, field) + case "scene": + return 
ec.fieldContext_MergedLayer_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedLayer", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerItem().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerItem_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + 
return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerItem_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_scenePlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerItem().ScenePlugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ScenePlugin) + fc.Result = res + return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) 
fieldContext_LayerItem_scenePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerTagGroup_tagId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerTagGroup_tagId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TagID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerTagGroup_tagId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerTagGroup", + Field: field, 
+ IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerTagGroup_children(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerTagGroup_children(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Children, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.LayerTagItem) + fc.Result = res + return ec.marshalNLayerTagItem2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagItemแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerTagGroup_children(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerTagGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "tagId": + return ec.fieldContext_LayerTagItem_tagId(ctx, field) + case "tag": + return ec.fieldContext_LayerTagItem_tag(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerTagItem", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerTagGroup_tag(ctx 
context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerTagGroup_tag(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerTagGroup().Tag(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(gqlmodel.Tag) + fc.Result = res + return ec.marshalOTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerTagGroup_tag(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerTagGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerTagItem_tagId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerTagItem_tagId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TagID, nil + }) + if err != nil { + ec.Error(ctx, err) + 
return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerTagItem_tagId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerTagItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _LayerTagItem_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerTagItem_tag(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerTagItem().Tag(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(gqlmodel.Tag) + fc.Result = res + return ec.marshalOTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_LayerTagItem_tag(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerTagItem", + Field: field, + IsMethod: true, + IsResolver: true, + 
Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _Me_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Me_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Me", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Me_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Me_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Me", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Me_email(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_email(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Email, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Me_email(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Me", + Field: field, + 
IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Me_lang(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_lang(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Lang, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(language.Tag) + fc.Result = res + return ec.marshalNLang2golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Me_lang(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Me", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Lang does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Me_theme(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_theme(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Theme, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Theme) + fc.Result = res + return ec.marshalNTheme2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Me_theme(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Me", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Theme does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Me_myTeamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_myTeamId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.MyTeamID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) 
fieldContext_Me_myTeamId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Me", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Me_auths(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_auths(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Auths, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]string) + fc.Result = res + return ec.marshalNString2แš•stringแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Me_auths(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Me", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Me_teams(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_teams(ctx, field) + if err != nil { + return graphql.Null + } + ctx = 
graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Me().Teams(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.Team) + fc.Result = res + return ec.marshalNTeam2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Me_teams(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Me", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Me_myTeam(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_myTeam(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Me().MyTeam(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Team) + fc.Result = res + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Me_myTeam(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Me", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedInfobox_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfobox_sceneID(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) 
(interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedInfobox_sceneID(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfobox", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedInfobox_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfobox_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Property, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.MergedProperty) + fc.Result = res + return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedInfobox_property(ctx 
context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfobox", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalId": + return ec.fieldContext_MergedProperty_originalId(ctx, field) + case "parentId": + return ec.fieldContext_MergedProperty_parentId(ctx, field) + case "schemaId": + return ec.fieldContext_MergedProperty_schemaId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_MergedProperty_linkedDatasetId(ctx, field) + case "original": + return ec.fieldContext_MergedProperty_original(ctx, field) + case "parent": + return ec.fieldContext_MergedProperty_parent(ctx, field) + case "schema": + return ec.fieldContext_MergedProperty_schema(ctx, field) + case "linkedDataset": + return ec.fieldContext_MergedProperty_linkedDataset(ctx, field) + case "groups": + return ec.fieldContext_MergedProperty_groups(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedProperty", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfobox_fields(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := 
resTmp.([]*gqlmodel.MergedInfoboxField) + fc.Result = res + return ec.marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxFieldแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedInfobox_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfobox", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalId": + return ec.fieldContext_MergedInfoboxField_originalId(ctx, field) + case "sceneID": + return ec.fieldContext_MergedInfoboxField_sceneID(ctx, field) + case "pluginId": + return ec.fieldContext_MergedInfoboxField_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_MergedInfoboxField_extensionId(ctx, field) + case "property": + return ec.fieldContext_MergedInfoboxField_property(ctx, field) + case "plugin": + return ec.fieldContext_MergedInfoboxField_plugin(ctx, field) + case "extension": + return ec.fieldContext_MergedInfoboxField_extension(ctx, field) + case "scene": + return ec.fieldContext_MergedInfoboxField_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_MergedInfoboxField_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedInfoboxField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedInfobox_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfobox_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedInfobox().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedInfobox_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfobox", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return 
ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedInfoboxField_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_originalId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.OriginalID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedInfoboxField_originalId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedInfoboxField_sceneID(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_sceneID(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedInfoboxField_sceneID(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_pluginId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + 
}) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedInfoboxField_pluginId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_extensionId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ExtensionID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedInfoboxField_extensionId(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedInfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Property, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.MergedProperty) + fc.Result = res + return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedInfoboxField_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalId": + return ec.fieldContext_MergedProperty_originalId(ctx, field) + case "parentId": + return ec.fieldContext_MergedProperty_parentId(ctx, field) + case "schemaId": + return ec.fieldContext_MergedProperty_schemaId(ctx, field) + 
case "linkedDatasetId": + return ec.fieldContext_MergedProperty_linkedDatasetId(ctx, field) + case "original": + return ec.fieldContext_MergedProperty_original(ctx, field) + case "parent": + return ec.fieldContext_MergedProperty_parent(ctx, field) + case "schema": + return ec.fieldContext_MergedProperty_schema(ctx, field) + case "linkedDataset": + return ec.fieldContext_MergedProperty_linkedDataset(ctx, field) + case "groups": + return ec.fieldContext_MergedProperty_groups(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedProperty", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedInfoboxField_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_plugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedInfoboxField().Plugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Plugin) + fc.Result = res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedInfoboxField_plugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case 
"id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedInfoboxField_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_extension(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
ec.resolvers.MergedInfoboxField().Extension(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PluginExtension) + fc.Result = res + return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedInfoboxField_extension(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "extensionId": + return ec.fieldContext_PluginExtension_extensionId(ctx, field) + case "pluginId": + return ec.fieldContext_PluginExtension_pluginId(ctx, field) + case "type": + return ec.fieldContext_PluginExtension_type(ctx, field) + case "name": + return ec.fieldContext_PluginExtension_name(ctx, field) + case "description": + return ec.fieldContext_PluginExtension_description(ctx, field) + case "icon": + return ec.fieldContext_PluginExtension_icon(ctx, field) + case "singleOnly": + return ec.fieldContext_PluginExtension_singleOnly(ctx, field) + case "widgetLayout": + return ec.fieldContext_PluginExtension_widgetLayout(ctx, field) + case "visualizer": + return ec.fieldContext_PluginExtension_visualizer(ctx, field) + case "propertySchemaId": + return ec.fieldContext_PluginExtension_propertySchemaId(ctx, field) + case "allTranslatedName": + return ec.fieldContext_PluginExtension_allTranslatedName(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PluginExtension_allTranslatedDescription(ctx, field) + case "plugin": + return ec.fieldContext_PluginExtension_plugin(ctx, field) + case "sceneWidget": + return 
ec.fieldContext_PluginExtension_sceneWidget(ctx, field) + case "propertySchema": + return ec.fieldContext_PluginExtension_propertySchema(ctx, field) + case "translatedName": + return ec.fieldContext_PluginExtension_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_PluginExtension_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PluginExtension", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedInfoboxField_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedInfoboxField().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedInfoboxField_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + 
return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedInfoboxField_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_scenePlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
ec.resolvers.MergedInfoboxField().ScenePlugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ScenePlugin) + fc.Result = res + return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedInfoboxField_scenePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedLayer_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedLayer_originalId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.OriginalID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be 
null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedLayer_originalId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedLayer_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedLayer_parentId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedLayer_parentId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, 
errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedLayer_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedLayer_sceneID(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedLayer_sceneID(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedLayer_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedLayer_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Property, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.MergedProperty) + fc.Result = res + return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedLayer_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalId": + return ec.fieldContext_MergedProperty_originalId(ctx, field) + case "parentId": + return ec.fieldContext_MergedProperty_parentId(ctx, field) + case "schemaId": + return ec.fieldContext_MergedProperty_schemaId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_MergedProperty_linkedDatasetId(ctx, field) + case "original": + return ec.fieldContext_MergedProperty_original(ctx, field) + case "parent": + return ec.fieldContext_MergedProperty_parent(ctx, field) + case "schema": + return ec.fieldContext_MergedProperty_schema(ctx, field) + case "linkedDataset": + return ec.fieldContext_MergedProperty_linkedDataset(ctx, field) + case "groups": + return ec.fieldContext_MergedProperty_groups(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedProperty", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedLayer_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedLayer_infobox(ctx, field) + if err != nil { + 
return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Infobox, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.MergedInfobox) + fc.Result = res + return ec.marshalOMergedInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfobox(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedLayer_infobox(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "sceneID": + return ec.fieldContext_MergedInfobox_sceneID(ctx, field) + case "property": + return ec.fieldContext_MergedInfobox_property(ctx, field) + case "fields": + return ec.fieldContext_MergedInfobox_fields(ctx, field) + case "scene": + return ec.fieldContext_MergedInfobox_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedInfobox", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedLayer_original(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedLayer_original(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedLayer().Original(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.LayerItem) + fc.Result = res + return ec.marshalOLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerItem(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedLayer_original(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerItem_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerItem_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerItem_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerItem_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerItem_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerItem_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerItem_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerItem_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerItem_parentId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_LayerItem_linkedDatasetId(ctx, field) + case "tags": + return ec.fieldContext_LayerItem_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerItem_parent(ctx, field) + case "property": + return ec.fieldContext_LayerItem_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerItem_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerItem_extension(ctx, field) + case 
"linkedDataset": + return ec.fieldContext_LayerItem_linkedDataset(ctx, field) + case "merged": + return ec.fieldContext_LayerItem_merged(ctx, field) + case "scene": + return ec.fieldContext_LayerItem_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerItem_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerItem", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedLayer_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedLayer_parent(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedLayer().Parent(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.LayerGroup) + fc.Result = res + return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedLayer_parent(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return 
ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedLayer_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedLayer_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, 
error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedLayer().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedLayer_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, 
field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedProperty_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_originalId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.OriginalID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedProperty_originalId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedProperty_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_parentId(ctx, field) + if err != nil { + return graphql.Null + } 
+ ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedProperty_parentId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedProperty_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_schemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return 
ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedProperty_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedProperty_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_linkedDatasetId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LinkedDatasetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedProperty_linkedDatasetId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") 
+ }, + } + return fc, nil +} + +func (ec *executionContext) _MergedProperty_original(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_original(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedProperty().Original(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedProperty_original(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedProperty_parent(ctx context.Context, 
field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_parent(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedProperty().Parent(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedProperty_parent(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedProperty_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_MergedProperty_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedProperty().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedProperty_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedProperty_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_linkedDataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + 
}() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedProperty().LinkedDataset(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedProperty_linkedDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedProperty_groups(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_groups(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = 
rctx // use context from middleware stack in children + return ec.resolvers.MergedProperty().Groups(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.MergedPropertyGroup) + fc.Result = res + return ec.marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroupแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedProperty_groups(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalPropertyId": + return ec.fieldContext_MergedPropertyGroup_originalPropertyId(ctx, field) + case "parentPropertyId": + return ec.fieldContext_MergedPropertyGroup_parentPropertyId(ctx, field) + case "originalId": + return ec.fieldContext_MergedPropertyGroup_originalId(ctx, field) + case "parentId": + return ec.fieldContext_MergedPropertyGroup_parentId(ctx, field) + case "schemaGroupId": + return ec.fieldContext_MergedPropertyGroup_schemaGroupId(ctx, field) + case "schemaId": + return ec.fieldContext_MergedPropertyGroup_schemaId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_MergedPropertyGroup_linkedDatasetId(ctx, field) + case "fields": + return ec.fieldContext_MergedPropertyGroup_fields(ctx, field) + case "groups": + return ec.fieldContext_MergedPropertyGroup_groups(ctx, field) + case "originalProperty": + return ec.fieldContext_MergedPropertyGroup_originalProperty(ctx, field) + case "parentProperty": + return ec.fieldContext_MergedPropertyGroup_parentProperty(ctx, field) + case "original": + return 
ec.fieldContext_MergedPropertyGroup_original(ctx, field) + case "parent": + return ec.fieldContext_MergedPropertyGroup_parent(ctx, field) + case "schema": + return ec.fieldContext_MergedPropertyGroup_schema(ctx, field) + case "linkedDataset": + return ec.fieldContext_MergedPropertyGroup_linkedDataset(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedPropertyGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_schemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyField_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) 
_MergedPropertyField_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_fieldId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyField_fieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyField_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_value(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx 
= rctx // use context from middleware stack in children + return obj.Value, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(interface{}) + fc.Result = res + return ec.marshalOAny2interface(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyField_value(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Any does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_type(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ValueType) + fc.Result = res + return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyField_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = 
&graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ValueType does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyField_links(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_links(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Links, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]*gqlmodel.PropertyFieldLink) + fc.Result = res + return ec.marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldLinkแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyField_links(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "datasetId": + return ec.fieldContext_PropertyFieldLink_datasetId(ctx, field) + case "datasetSchemaId": + return ec.fieldContext_PropertyFieldLink_datasetSchemaId(ctx, field) + case "datasetSchemaFieldId": + return ec.fieldContext_PropertyFieldLink_datasetSchemaFieldId(ctx, field) + case 
"dataset": + return ec.fieldContext_PropertyFieldLink_dataset(ctx, field) + case "datasetField": + return ec.fieldContext_PropertyFieldLink_datasetField(ctx, field) + case "datasetSchema": + return ec.fieldContext_PropertyFieldLink_datasetSchema(ctx, field) + case "datasetSchemaField": + return ec.fieldContext_PropertyFieldLink_datasetSchemaField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyFieldLink", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyField_overridden(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_overridden(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Overridden, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyField_overridden(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyField_schema(ctx context.Context, 
field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedPropertyField().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyField_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyField_field(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_field(ctx, field) + if err != nil { + return graphql.Null + } + ctx = 
graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedPropertyField().Field(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchemaField) + fc.Result = res + return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyField_field(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_PropertySchemaField_fieldId(ctx, field) + case "type": + return ec.fieldContext_PropertySchemaField_type(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaField_title(ctx, field) + case "description": + return ec.fieldContext_PropertySchemaField_description(ctx, field) + case "prefix": + return ec.fieldContext_PropertySchemaField_prefix(ctx, field) + case "suffix": + return ec.fieldContext_PropertySchemaField_suffix(ctx, field) + case "defaultValue": + return ec.fieldContext_PropertySchemaField_defaultValue(ctx, field) + case "ui": + return ec.fieldContext_PropertySchemaField_ui(ctx, field) + case "min": + return ec.fieldContext_PropertySchemaField_min(ctx, field) + case "max": + return ec.fieldContext_PropertySchemaField_max(ctx, field) + case "choices": + return 
ec.fieldContext_PropertySchemaField_choices(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaField_isAvailableIf(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaField_allTranslatedTitle(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PropertySchemaField_allTranslatedDescription(ctx, field) + case "translatedTitle": + return ec.fieldContext_PropertySchemaField_translatedTitle(ctx, field) + case "translatedDescription": + return ec.fieldContext_PropertySchemaField_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyField_actualValue(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_actualValue(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedPropertyField().ActualValue(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(interface{}) + fc.Result = res + return ec.marshalOAny2interface(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyField_actualValue(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) 
{ + return nil, errors.New("field of type Any does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyGroup_originalPropertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_originalPropertyId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.OriginalPropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyGroup_originalPropertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyGroup_parentPropertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_parentPropertyId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret 
= graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentPropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyGroup_parentPropertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyGroup_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_originalId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.OriginalID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) 
fieldContext_MergedPropertyGroup_originalId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyGroup_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_parentId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyGroup_parentId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_schemaGroupId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaGroupID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_schemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + 
return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyGroup_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_linkedDatasetId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LinkedDatasetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyGroup_linkedDatasetId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: 
"MergedPropertyGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyGroup_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_fields(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.MergedPropertyField) + fc.Result = res + return ec.marshalNMergedPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyFieldแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyGroup_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "schemaId": + return ec.fieldContext_MergedPropertyField_schemaId(ctx, field) + case "fieldId": + return ec.fieldContext_MergedPropertyField_fieldId(ctx, field) + case "value": + return ec.fieldContext_MergedPropertyField_value(ctx, field) + 
case "type": + return ec.fieldContext_MergedPropertyField_type(ctx, field) + case "links": + return ec.fieldContext_MergedPropertyField_links(ctx, field) + case "overridden": + return ec.fieldContext_MergedPropertyField_overridden(ctx, field) + case "schema": + return ec.fieldContext_MergedPropertyField_schema(ctx, field) + case "field": + return ec.fieldContext_MergedPropertyField_field(ctx, field) + case "actualValue": + return ec.fieldContext_MergedPropertyField_actualValue(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedPropertyField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyGroup_groups(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_groups(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Groups, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.MergedPropertyGroup) + fc.Result = res + return ec.marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroupแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyGroup_groups(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx 
context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalPropertyId": + return ec.fieldContext_MergedPropertyGroup_originalPropertyId(ctx, field) + case "parentPropertyId": + return ec.fieldContext_MergedPropertyGroup_parentPropertyId(ctx, field) + case "originalId": + return ec.fieldContext_MergedPropertyGroup_originalId(ctx, field) + case "parentId": + return ec.fieldContext_MergedPropertyGroup_parentId(ctx, field) + case "schemaGroupId": + return ec.fieldContext_MergedPropertyGroup_schemaGroupId(ctx, field) + case "schemaId": + return ec.fieldContext_MergedPropertyGroup_schemaId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_MergedPropertyGroup_linkedDatasetId(ctx, field) + case "fields": + return ec.fieldContext_MergedPropertyGroup_fields(ctx, field) + case "groups": + return ec.fieldContext_MergedPropertyGroup_groups(ctx, field) + case "originalProperty": + return ec.fieldContext_MergedPropertyGroup_originalProperty(ctx, field) + case "parentProperty": + return ec.fieldContext_MergedPropertyGroup_parentProperty(ctx, field) + case "original": + return ec.fieldContext_MergedPropertyGroup_original(ctx, field) + case "parent": + return ec.fieldContext_MergedPropertyGroup_parent(ctx, field) + case "schema": + return ec.fieldContext_MergedPropertyGroup_schema(ctx, field) + case "linkedDataset": + return ec.fieldContext_MergedPropertyGroup_linkedDataset(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedPropertyGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyGroup_originalProperty(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_originalProperty(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + 
ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedPropertyGroup().OriginalProperty(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyGroup_originalProperty(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyGroup_parentProperty(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_parentProperty(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedPropertyGroup().ParentProperty(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyGroup_parentProperty(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyGroup_original(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_original(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedPropertyGroup().Original(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertyGroup) + fc.Result = res + return ec.marshalOPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyGroup_original(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertyGroup_id(ctx, field) + case "schemaId": + return ec.fieldContext_PropertyGroup_schemaId(ctx, field) + case "schemaGroupId": + return ec.fieldContext_PropertyGroup_schemaGroupId(ctx, field) + case "fields": + return ec.fieldContext_PropertyGroup_fields(ctx, field) + case "schema": + return ec.fieldContext_PropertyGroup_schema(ctx, field) + case "schemaGroup": + return ec.fieldContext_PropertyGroup_schemaGroup(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyGroup_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_parent(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedPropertyGroup().Parent(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertyGroup) + fc.Result = res + return ec.marshalOPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyGroup_parent(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertyGroup_id(ctx, field) + case "schemaId": + return ec.fieldContext_PropertyGroup_schemaId(ctx, field) + case "schemaGroupId": + return ec.fieldContext_PropertyGroup_schemaGroupId(ctx, field) + case "fields": + return ec.fieldContext_PropertyGroup_fields(ctx, field) + case "schema": + return ec.fieldContext_PropertyGroup_schema(ctx, field) + case "schemaGroup": + return ec.fieldContext_PropertyGroup_schemaGroup(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyGroup_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedPropertyGroup().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyGroup_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedPropertyGroup_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_linkedDataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedPropertyGroup().LinkedDataset(rctx, obj) + }) + if 
err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MergedPropertyGroup_linkedDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MoveInfoboxFieldPayload_infoboxFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MoveInfoboxFieldPayload_infoboxFieldId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.InfoboxFieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if 
!graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MoveInfoboxFieldPayload_infoboxFieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MoveInfoboxFieldPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MoveInfoboxFieldPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = 
&graphql.FieldContext{ + Object: "MoveInfoboxFieldPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _MoveInfoboxFieldPayload_index(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MoveInfoboxFieldPayload_index(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Index, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int) + fc.Result = res + return ec.marshalNInt2int(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MoveInfoboxFieldPayload_index(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MoveInfoboxFieldPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MoveLayerPayload_layerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MoveLayerPayload_layerId(ctx, field) + if err != nil { + return 
graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LayerID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MoveLayerPayload_layerId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MoveLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _MoveLayerPayload_fromParentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MoveLayerPayload_fromParentLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FromParentLayer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + 
ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.LayerGroup) + fc.Result = res + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MoveLayerPayload_fromParentLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MoveLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": 
+ return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MoveLayerPayload_toParentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MoveLayerPayload_toParentLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ToParentLayer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.LayerGroup) + fc.Result = res + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MoveLayerPayload_toParentLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MoveLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, 
field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MoveLayerPayload_index(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MoveLayerPayload_index(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + 
ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Index, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int) + fc.Result = res + return ec.marshalNInt2int(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_MoveLayerPayload_index(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MoveLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Mutation_createAsset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_createAsset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().CreateAsset(rctx, fc.Args["input"].(gqlmodel.CreateAssetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.CreateAssetPayload) + fc.Result = res + return ec.marshalOCreateAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateAssetPayload(ctx, field.Selections, res) +} + +func (ec 
*executionContext) fieldContext_Mutation_createAsset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "asset": + return ec.fieldContext_CreateAssetPayload_asset(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type CreateAssetPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_createAsset_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removeAsset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeAsset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveAsset(rctx, fc.Args["input"].(gqlmodel.RemoveAssetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.RemoveAssetPayload) + fc.Result = res + return ec.marshalORemoveAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveAssetPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_removeAsset(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "assetId": + return ec.fieldContext_RemoveAssetPayload_assetId(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type RemoveAssetPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_removeAsset_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_signup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_signup(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().Signup(rctx, fc.Args["input"].(gqlmodel.SignupInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.SignupPayload) + fc.Result = res + return ec.marshalOSignupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSignupPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_signup(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: 
func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "user": + return ec.fieldContext_SignupPayload_user(ctx, field) + case "team": + return ec.fieldContext_SignupPayload_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type SignupPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_signup_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updateMe(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateMe(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateMe(rctx, fc.Args["input"].(gqlmodel.UpdateMeInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.UpdateMePayload) + fc.Result = res + return ec.marshalOUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMePayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_updateMe(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, 
error) { + switch field.Name { + case "me": + return ec.fieldContext_UpdateMePayload_me(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateMePayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_updateMe_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removeMyAuth(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeMyAuth(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveMyAuth(rctx, fc.Args["input"].(gqlmodel.RemoveMyAuthInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.UpdateMePayload) + fc.Result = res + return ec.marshalOUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMePayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_removeMyAuth(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "me": + return ec.fieldContext_UpdateMePayload_me(ctx, field) + } + return nil, fmt.Errorf("no 
field named %q was found under type UpdateMePayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_removeMyAuth_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_deleteMe(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_deleteMe(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().DeleteMe(rctx, fc.Args["input"].(gqlmodel.DeleteMeInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DeleteMePayload) + fc.Result = res + return ec.marshalODeleteMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteMePayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_deleteMe(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "userId": + return ec.fieldContext_DeleteMePayload_userId(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DeleteMePayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = 
ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_deleteMe_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_createTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_createTeam(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().CreateTeam(rctx, fc.Args["input"].(gqlmodel.CreateTeamInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.CreateTeamPayload) + fc.Result = res + return ec.marshalOCreateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTeamPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_createTeam(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "team": + return ec.fieldContext_CreateTeamPayload_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type CreateTeamPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = 
ec.field_Mutation_createTeam_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_deleteTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_deleteTeam(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().DeleteTeam(rctx, fc.Args["input"].(gqlmodel.DeleteTeamInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DeleteTeamPayload) + fc.Result = res + return ec.marshalODeleteTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteTeamPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_deleteTeam(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "teamId": + return ec.fieldContext_DeleteTeamPayload_teamId(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DeleteTeamPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_deleteTeam_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + 
return fc, nil +} + +func (ec *executionContext) _Mutation_updateTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateTeam(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateTeam(rctx, fc.Args["input"].(gqlmodel.UpdateTeamInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.UpdateTeamPayload) + fc.Result = res + return ec.marshalOUpdateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTeamPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_updateTeam(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "team": + return ec.fieldContext_UpdateTeamPayload_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateTeamPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_updateTeam_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addMemberToTeam(ctx context.Context, field graphql.CollectedField) 
(ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addMemberToTeam(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddMemberToTeam(rctx, fc.Args["input"].(gqlmodel.AddMemberToTeamInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.AddMemberToTeamPayload) + fc.Result = res + return ec.marshalOAddMemberToTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddMemberToTeamPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_addMemberToTeam(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "team": + return ec.fieldContext_AddMemberToTeamPayload_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddMemberToTeamPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_addMemberToTeam_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removeMemberFromTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_Mutation_removeMemberFromTeam(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveMemberFromTeam(rctx, fc.Args["input"].(gqlmodel.RemoveMemberFromTeamInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.RemoveMemberFromTeamPayload) + fc.Result = res + return ec.marshalORemoveMemberFromTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveMemberFromTeamPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_removeMemberFromTeam(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "team": + return ec.fieldContext_RemoveMemberFromTeamPayload_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type RemoveMemberFromTeamPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_removeMemberFromTeam_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updateMemberOfTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_Mutation_updateMemberOfTeam(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateMemberOfTeam(rctx, fc.Args["input"].(gqlmodel.UpdateMemberOfTeamInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.UpdateMemberOfTeamPayload) + fc.Result = res + return ec.marshalOUpdateMemberOfTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMemberOfTeamPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_updateMemberOfTeam(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "team": + return ec.fieldContext_UpdateMemberOfTeamPayload_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateMemberOfTeamPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_updateMemberOfTeam_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_createProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_createProject(ctx, 
field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().CreateProject(rctx, fc.Args["input"].(gqlmodel.CreateProjectInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ProjectPayload) + fc.Result = res + return ec.marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_createProject(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "project": + return ec.fieldContext_ProjectPayload_project(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ProjectPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_createProject_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updateProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateProject(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r 
:= recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateProject(rctx, fc.Args["input"].(gqlmodel.UpdateProjectInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ProjectPayload) + fc.Result = res + return ec.marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_updateProject(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "project": + return ec.fieldContext_ProjectPayload_project(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ProjectPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_updateProject_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_publishProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_publishProject(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().PublishProject(rctx, fc.Args["input"].(gqlmodel.PublishProjectInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ProjectPayload) + fc.Result = res + return ec.marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_publishProject(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "project": + return ec.fieldContext_ProjectPayload_project(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ProjectPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_publishProject_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_deleteProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_deleteProject(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from 
middleware stack in children + return ec.resolvers.Mutation().DeleteProject(rctx, fc.Args["input"].(gqlmodel.DeleteProjectInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DeleteProjectPayload) + fc.Result = res + return ec.marshalODeleteProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteProjectPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_deleteProject(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "projectId": + return ec.fieldContext_DeleteProjectPayload_projectId(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DeleteProjectPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_deleteProject_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_createScene(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_createScene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().CreateScene(rctx, 
fc.Args["input"].(gqlmodel.CreateSceneInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.CreateScenePayload) + fc.Result = res + return ec.marshalOCreateScenePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateScenePayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_createScene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_CreateScenePayload_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type CreateScenePayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_createScene_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addWidget(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddWidget(rctx, fc.Args["input"].(gqlmodel.AddWidgetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if 
resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.AddWidgetPayload) + fc.Result = res + return ec.marshalOAddWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddWidgetPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_addWidget(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_AddWidgetPayload_scene(ctx, field) + case "sceneWidget": + return ec.fieldContext_AddWidgetPayload_sceneWidget(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddWidgetPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_addWidget_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updateWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateWidget(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateWidget(rctx, fc.Args["input"].(gqlmodel.UpdateWidgetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return 
graphql.Null + } + res := resTmp.(*gqlmodel.UpdateWidgetPayload) + fc.Result = res + return ec.marshalOUpdateWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_updateWidget(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_UpdateWidgetPayload_scene(ctx, field) + case "sceneWidget": + return ec.fieldContext_UpdateWidgetPayload_sceneWidget(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateWidgetPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_updateWidget_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updateWidgetAlignSystem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateWidgetAlignSystem(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateWidgetAlignSystem(rctx, fc.Args["input"].(gqlmodel.UpdateWidgetAlignSystemInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + 
} + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.UpdateWidgetAlignSystemPayload) + fc.Result = res + return ec.marshalOUpdateWidgetAlignSystemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetAlignSystemPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_updateWidgetAlignSystem(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_UpdateWidgetAlignSystemPayload_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateWidgetAlignSystemPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_updateWidgetAlignSystem_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removeWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeWidget(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveWidget(rctx, fc.Args["input"].(gqlmodel.RemoveWidgetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + 
return graphql.Null + } + res := resTmp.(*gqlmodel.RemoveWidgetPayload) + fc.Result = res + return ec.marshalORemoveWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveWidgetPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_removeWidget(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_RemoveWidgetPayload_scene(ctx, field) + case "widgetId": + return ec.fieldContext_RemoveWidgetPayload_widgetId(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type RemoveWidgetPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_removeWidget_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_installPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_installPlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().InstallPlugin(rctx, fc.Args["input"].(gqlmodel.InstallPluginInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return 
graphql.Null + } + res := resTmp.(*gqlmodel.InstallPluginPayload) + fc.Result = res + return ec.marshalOInstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInstallPluginPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_installPlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_InstallPluginPayload_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_InstallPluginPayload_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type InstallPluginPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_installPlugin_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_uninstallPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_uninstallPlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UninstallPlugin(rctx, fc.Args["input"].(gqlmodel.UninstallPluginInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + 
return graphql.Null + } + res := resTmp.(*gqlmodel.UninstallPluginPayload) + fc.Result = res + return ec.marshalOUninstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUninstallPluginPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_uninstallPlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_UninstallPluginPayload_pluginId(ctx, field) + case "scene": + return ec.fieldContext_UninstallPluginPayload_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UninstallPluginPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_uninstallPlugin_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_uploadPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_uploadPlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UploadPlugin(rctx, fc.Args["input"].(gqlmodel.UploadPluginInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == 
nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.UploadPluginPayload) + fc.Result = res + return ec.marshalOUploadPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUploadPluginPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_uploadPlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "plugin": + return ec.fieldContext_UploadPluginPayload_plugin(ctx, field) + case "scene": + return ec.fieldContext_UploadPluginPayload_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_UploadPluginPayload_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UploadPluginPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_uploadPlugin_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_upgradePlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_upgradePlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpgradePlugin(rctx, fc.Args["input"].(gqlmodel.UpgradePluginInput)) + }) + if 
err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.UpgradePluginPayload) + fc.Result = res + return ec.marshalOUpgradePluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpgradePluginPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_upgradePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_UpgradePluginPayload_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_UpgradePluginPayload_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpgradePluginPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_upgradePlugin_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addCluster(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addCluster(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddCluster(rctx, fc.Args["input"].(gqlmodel.AddClusterInput)) + }) + if err != nil { 
+ ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.AddClusterPayload) + fc.Result = res + return ec.marshalOAddClusterPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddClusterPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_addCluster(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_AddClusterPayload_scene(ctx, field) + case "cluster": + return ec.fieldContext_AddClusterPayload_cluster(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddClusterPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_addCluster_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updateCluster(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateCluster(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateCluster(rctx, fc.Args["input"].(gqlmodel.UpdateClusterInput)) + }) + if err != nil { + ec.Error(ctx, err) + return 
graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.UpdateClusterPayload) + fc.Result = res + return ec.marshalOUpdateClusterPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateClusterPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_updateCluster(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_UpdateClusterPayload_scene(ctx, field) + case "cluster": + return ec.fieldContext_UpdateClusterPayload_cluster(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateClusterPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_updateCluster_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removeCluster(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeCluster(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveCluster(rctx, fc.Args["input"].(gqlmodel.RemoveClusterInput)) + }) + if err != nil { + ec.Error(ctx, err) + return 
graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.RemoveClusterPayload) + fc.Result = res + return ec.marshalORemoveClusterPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveClusterPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_removeCluster(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_RemoveClusterPayload_scene(ctx, field) + case "clusterId": + return ec.fieldContext_RemoveClusterPayload_clusterId(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type RemoveClusterPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_removeCluster_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updateDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateDatasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateDatasetSchema(rctx, fc.Args["input"].(gqlmodel.UpdateDatasetSchemaInput)) + }) + if err != nil { + 
ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.UpdateDatasetSchemaPayload) + fc.Result = res + return ec.marshalOUpdateDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateDatasetSchemaPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_updateDatasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "datasetSchema": + return ec.fieldContext_UpdateDatasetSchemaPayload_datasetSchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateDatasetSchemaPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_updateDatasetSchema_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_syncDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_syncDataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().SyncDataset(rctx, fc.Args["input"].(gqlmodel.SyncDatasetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + 
} + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.SyncDatasetPayload) + fc.Result = res + return ec.marshalOSyncDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSyncDatasetPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_syncDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "sceneId": + return ec.fieldContext_SyncDatasetPayload_sceneId(ctx, field) + case "url": + return ec.fieldContext_SyncDatasetPayload_url(ctx, field) + case "datasetSchema": + return ec.fieldContext_SyncDatasetPayload_datasetSchema(ctx, field) + case "dataset": + return ec.fieldContext_SyncDatasetPayload_dataset(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type SyncDatasetPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_syncDataset_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addDynamicDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addDynamicDatasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + 
return ec.resolvers.Mutation().AddDynamicDatasetSchema(rctx, fc.Args["input"].(gqlmodel.AddDynamicDatasetSchemaInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.AddDynamicDatasetSchemaPayload) + fc.Result = res + return ec.marshalOAddDynamicDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetSchemaPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_addDynamicDatasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "datasetSchema": + return ec.fieldContext_AddDynamicDatasetSchemaPayload_datasetSchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddDynamicDatasetSchemaPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_addDynamicDatasetSchema_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addDynamicDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addDynamicDataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from 
middleware stack in children + return ec.resolvers.Mutation().AddDynamicDataset(rctx, fc.Args["input"].(gqlmodel.AddDynamicDatasetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.AddDynamicDatasetPayload) + fc.Result = res + return ec.marshalOAddDynamicDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_addDynamicDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "datasetSchema": + return ec.fieldContext_AddDynamicDatasetPayload_datasetSchema(ctx, field) + case "dataset": + return ec.fieldContext_AddDynamicDatasetPayload_dataset(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddDynamicDatasetPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_addDynamicDataset_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removeDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeDatasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveDatasetSchema(rctx, fc.Args["input"].(gqlmodel.RemoveDatasetSchemaInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.RemoveDatasetSchemaPayload) + fc.Result = res + return ec.marshalORemoveDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveDatasetSchemaPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_removeDatasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "schemaId": + return ec.fieldContext_RemoveDatasetSchemaPayload_schemaId(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type RemoveDatasetSchemaPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_removeDatasetSchema_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_importDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_importDataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, 
error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().ImportDataset(rctx, fc.Args["input"].(gqlmodel.ImportDatasetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ImportDatasetPayload) + fc.Result = res + return ec.marshalOImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_importDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "datasetSchema": + return ec.fieldContext_ImportDatasetPayload_datasetSchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ImportDatasetPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_importDataset_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_importDatasetFromGoogleSheet(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_importDatasetFromGoogleSheet(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from 
middleware stack in children + return ec.resolvers.Mutation().ImportDatasetFromGoogleSheet(rctx, fc.Args["input"].(gqlmodel.ImportDatasetFromGoogleSheetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ImportDatasetPayload) + fc.Result = res + return ec.marshalOImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_importDatasetFromGoogleSheet(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "datasetSchema": + return ec.fieldContext_ImportDatasetPayload_datasetSchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ImportDatasetPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_importDatasetFromGoogleSheet_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addDatasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware 
stack in children + return ec.resolvers.Mutation().AddDatasetSchema(rctx, fc.Args["input"].(gqlmodel.AddDatasetSchemaInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.AddDatasetSchemaPayload) + fc.Result = res + return ec.marshalOAddDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDatasetSchemaPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_addDatasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "datasetSchema": + return ec.fieldContext_AddDatasetSchemaPayload_datasetSchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddDatasetSchemaPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_addDatasetSchema_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updatePropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updatePropertyValue(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
ec.resolvers.Mutation().UpdatePropertyValue(rctx, fc.Args["input"].(gqlmodel.UpdatePropertyValueInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertyFieldPayload) + fc.Result = res + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_updatePropertyValue(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyFieldPayload_property(ctx, field) + case "propertyField": + return ec.fieldContext_PropertyFieldPayload_propertyField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyFieldPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_updatePropertyValue_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removePropertyField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removePropertyField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use 
context from middleware stack in children + return ec.resolvers.Mutation().RemovePropertyField(rctx, fc.Args["input"].(gqlmodel.RemovePropertyFieldInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertyFieldPayload) + fc.Result = res + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_removePropertyField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyFieldPayload_property(ctx, field) + case "propertyField": + return ec.fieldContext_PropertyFieldPayload_propertyField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyFieldPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_removePropertyField_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_uploadFileToProperty(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_uploadFileToProperty(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UploadFileToProperty(rctx, fc.Args["input"].(gqlmodel.UploadFileToPropertyInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertyFieldPayload) + fc.Result = res + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_uploadFileToProperty(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyFieldPayload_property(ctx, field) + case "propertyField": + return ec.fieldContext_PropertyFieldPayload_propertyField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyFieldPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_uploadFileToProperty_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_linkDatasetToPropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_linkDatasetToPropertyValue(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null 
+ } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().LinkDatasetToPropertyValue(rctx, fc.Args["input"].(gqlmodel.LinkDatasetToPropertyValueInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertyFieldPayload) + fc.Result = res + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_linkDatasetToPropertyValue(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyFieldPayload_property(ctx, field) + case "propertyField": + return ec.fieldContext_PropertyFieldPayload_propertyField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyFieldPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_linkDatasetToPropertyValue_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_unlinkPropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_unlinkPropertyValue(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := 
recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UnlinkPropertyValue(rctx, fc.Args["input"].(gqlmodel.UnlinkPropertyValueInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertyFieldPayload) + fc.Result = res + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_unlinkPropertyValue(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyFieldPayload_property(ctx, field) + case "propertyField": + return ec.fieldContext_PropertyFieldPayload_propertyField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyFieldPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_unlinkPropertyValue_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addPropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addPropertyItem(ctx, field) + if err != nil { + return graphql.Null + } + ctx = 
graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddPropertyItem(rctx, fc.Args["input"].(gqlmodel.AddPropertyItemInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertyItemPayload) + fc.Result = res + return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_addPropertyItem(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyItemPayload_property(ctx, field) + case "propertyItem": + return ec.fieldContext_PropertyItemPayload_propertyItem(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyItemPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_addPropertyItem_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_movePropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_movePropertyItem(ctx, field) + if err != nil { + return 
graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().MovePropertyItem(rctx, fc.Args["input"].(gqlmodel.MovePropertyItemInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertyItemPayload) + fc.Result = res + return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_movePropertyItem(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyItemPayload_property(ctx, field) + case "propertyItem": + return ec.fieldContext_PropertyItemPayload_propertyItem(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyItemPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_movePropertyItem_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removePropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removePropertyItem(ctx, field) + 
if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemovePropertyItem(rctx, fc.Args["input"].(gqlmodel.RemovePropertyItemInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertyItemPayload) + fc.Result = res + return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_removePropertyItem(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyItemPayload_property(ctx, field) + case "propertyItem": + return ec.fieldContext_PropertyItemPayload_propertyItem(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyItemPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_removePropertyItem_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updatePropertyItems(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_Mutation_updatePropertyItems(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdatePropertyItems(rctx, fc.Args["input"].(gqlmodel.UpdatePropertyItemInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertyItemPayload) + fc.Result = res + return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_updatePropertyItems(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyItemPayload_property(ctx, field) + case "propertyItem": + return ec.fieldContext_PropertyItemPayload_propertyItem(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyItemPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_updatePropertyItems_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addLayerItem(ctx context.Context, field graphql.CollectedField) (ret 
graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addLayerItem(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddLayerItem(rctx, fc.Args["input"].(gqlmodel.AddLayerItemInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.AddLayerItemPayload) + fc.Result = res + return ec.marshalOAddLayerItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerItemPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_addLayerItem(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layer": + return ec.fieldContext_AddLayerItemPayload_layer(ctx, field) + case "parentLayer": + return ec.fieldContext_AddLayerItemPayload_parentLayer(ctx, field) + case "index": + return ec.fieldContext_AddLayerItemPayload_index(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddLayerItemPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_addLayerItem_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) 
_Mutation_addLayerGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addLayerGroup(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddLayerGroup(rctx, fc.Args["input"].(gqlmodel.AddLayerGroupInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.AddLayerGroupPayload) + fc.Result = res + return ec.marshalOAddLayerGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerGroupPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_addLayerGroup(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layer": + return ec.fieldContext_AddLayerGroupPayload_layer(ctx, field) + case "parentLayer": + return ec.fieldContext_AddLayerGroupPayload_parentLayer(ctx, field) + case "index": + return ec.fieldContext_AddLayerGroupPayload_index(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddLayerGroupPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_addLayerGroup_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + 
ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removeLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveLayer(rctx, fc.Args["input"].(gqlmodel.RemoveLayerInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.RemoveLayerPayload) + fc.Result = res + return ec.marshalORemoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveLayerPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_removeLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layerId": + return ec.fieldContext_RemoveLayerPayload_layerId(ctx, field) + case "parentLayer": + return ec.fieldContext_RemoveLayerPayload_parentLayer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type RemoveLayerPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_removeLayer_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + 
return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updateLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateLayer(rctx, fc.Args["input"].(gqlmodel.UpdateLayerInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.UpdateLayerPayload) + fc.Result = res + return ec.marshalOUpdateLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateLayerPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_updateLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layer": + return ec.fieldContext_UpdateLayerPayload_layer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateLayerPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_updateLayer_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_moveLayer(ctx context.Context, field 
graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_moveLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().MoveLayer(rctx, fc.Args["input"].(gqlmodel.MoveLayerInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.MoveLayerPayload) + fc.Result = res + return ec.marshalOMoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveLayerPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_moveLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layerId": + return ec.fieldContext_MoveLayerPayload_layerId(ctx, field) + case "fromParentLayer": + return ec.fieldContext_MoveLayerPayload_fromParentLayer(ctx, field) + case "toParentLayer": + return ec.fieldContext_MoveLayerPayload_toParentLayer(ctx, field) + case "index": + return ec.fieldContext_MoveLayerPayload_index(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MoveLayerPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_moveLayer_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { 
+ ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_createInfobox(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_createInfobox(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().CreateInfobox(rctx, fc.Args["input"].(gqlmodel.CreateInfoboxInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.CreateInfoboxPayload) + fc.Result = res + return ec.marshalOCreateInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateInfoboxPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_createInfobox(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layer": + return ec.fieldContext_CreateInfoboxPayload_layer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type CreateInfoboxPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_createInfobox_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) 
_Mutation_removeInfobox(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeInfobox(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveInfobox(rctx, fc.Args["input"].(gqlmodel.RemoveInfoboxInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.RemoveInfoboxPayload) + fc.Result = res + return ec.marshalORemoveInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_removeInfobox(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layer": + return ec.fieldContext_RemoveInfoboxPayload_layer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type RemoveInfoboxPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_removeInfobox_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addInfoboxField(ctx context.Context, field graphql.CollectedField) (ret 
graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addInfoboxField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddInfoboxField(rctx, fc.Args["input"].(gqlmodel.AddInfoboxFieldInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.AddInfoboxFieldPayload) + fc.Result = res + return ec.marshalOAddInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddInfoboxFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_addInfoboxField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "infoboxField": + return ec.fieldContext_AddInfoboxFieldPayload_infoboxField(ctx, field) + case "layer": + return ec.fieldContext_AddInfoboxFieldPayload_layer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddInfoboxFieldPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_addInfoboxField_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_moveInfoboxField(ctx context.Context, field 
graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_moveInfoboxField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().MoveInfoboxField(rctx, fc.Args["input"].(gqlmodel.MoveInfoboxFieldInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.MoveInfoboxFieldPayload) + fc.Result = res + return ec.marshalOMoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveInfoboxFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_moveInfoboxField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "infoboxFieldId": + return ec.fieldContext_MoveInfoboxFieldPayload_infoboxFieldId(ctx, field) + case "layer": + return ec.fieldContext_MoveInfoboxFieldPayload_layer(ctx, field) + case "index": + return ec.fieldContext_MoveInfoboxFieldPayload_index(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MoveInfoboxFieldPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_moveInfoboxField_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + 
ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removeInfoboxField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeInfoboxField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveInfoboxField(rctx, fc.Args["input"].(gqlmodel.RemoveInfoboxFieldInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.RemoveInfoboxFieldPayload) + fc.Result = res + return ec.marshalORemoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_removeInfoboxField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "infoboxFieldId": + return ec.fieldContext_RemoveInfoboxFieldPayload_infoboxFieldId(ctx, field) + case "layer": + return ec.fieldContext_RemoveInfoboxFieldPayload_layer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type RemoveInfoboxFieldPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = 
ec.field_Mutation_removeInfoboxField_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_importLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_importLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().ImportLayer(rctx, fc.Args["input"].(gqlmodel.ImportLayerInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ImportLayerPayload) + fc.Result = res + return ec.marshalOImportLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportLayerPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_importLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layers": + return ec.fieldContext_ImportLayerPayload_layers(ctx, field) + case "parentLayer": + return ec.fieldContext_ImportLayerPayload_parentLayer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ImportLayerPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = 
ec.field_Mutation_importLayer_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_attachTagToLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_attachTagToLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AttachTagToLayer(rctx, fc.Args["input"].(gqlmodel.AttachTagToLayerInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.AttachTagToLayerPayload) + fc.Result = res + return ec.marshalOAttachTagToLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagToLayerPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_attachTagToLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layer": + return ec.fieldContext_AttachTagToLayerPayload_layer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AttachTagToLayerPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_attachTagToLayer_args(ctx, 
field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_detachTagFromLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_detachTagFromLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().DetachTagFromLayer(rctx, fc.Args["input"].(gqlmodel.DetachTagFromLayerInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DetachTagFromLayerPayload) + fc.Result = res + return ec.marshalODetachTagFromLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagFromLayerPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_detachTagFromLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layer": + return ec.fieldContext_DetachTagFromLayerPayload_layer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DetachTagFromLayerPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_detachTagFromLayer_args(ctx, field.ArgumentMap(ec.Variables)); err 
!= nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_createTagItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_createTagItem(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().CreateTagItem(rctx, fc.Args["input"].(gqlmodel.CreateTagItemInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.CreateTagItemPayload) + fc.Result = res + return ec.marshalOCreateTagItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagItemPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_createTagItem(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "tag": + return ec.fieldContext_CreateTagItemPayload_tag(ctx, field) + case "parent": + return ec.fieldContext_CreateTagItemPayload_parent(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type CreateTagItemPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_createTagItem_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + 
ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_createTagGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_createTagGroup(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().CreateTagGroup(rctx, fc.Args["input"].(gqlmodel.CreateTagGroupInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.CreateTagGroupPayload) + fc.Result = res + return ec.marshalOCreateTagGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagGroupPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_createTagGroup(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "tag": + return ec.fieldContext_CreateTagGroupPayload_tag(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type CreateTagGroupPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_createTagGroup_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) 
_Mutation_attachTagItemToGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_attachTagItemToGroup(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AttachTagItemToGroup(rctx, fc.Args["input"].(gqlmodel.AttachTagItemToGroupInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.AttachTagItemToGroupPayload) + fc.Result = res + return ec.marshalOAttachTagItemToGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagItemToGroupPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_attachTagItemToGroup(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "tag": + return ec.fieldContext_AttachTagItemToGroupPayload_tag(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AttachTagItemToGroupPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_attachTagItemToGroup_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) 
_Mutation_detachTagItemFromGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_detachTagItemFromGroup(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().DetachTagItemFromGroup(rctx, fc.Args["input"].(gqlmodel.DetachTagItemFromGroupInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DetachTagItemFromGroupPayload) + fc.Result = res + return ec.marshalODetachTagItemFromGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagItemFromGroupPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_detachTagItemFromGroup(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "tag": + return ec.fieldContext_DetachTagItemFromGroupPayload_tag(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DetachTagItemFromGroupPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_detachTagItemFromGroup_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec 
*executionContext) _Mutation_updateTag(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateTag(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateTag(rctx, fc.Args["input"].(gqlmodel.UpdateTagInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.UpdateTagPayload) + fc.Result = res + return ec.marshalOUpdateTagPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTagPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_updateTag(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "tag": + return ec.fieldContext_UpdateTagPayload_tag(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateTagPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_updateTag_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removeTag(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_Mutation_removeTag(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveTag(rctx, fc.Args["input"].(gqlmodel.RemoveTagInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.RemoveTagPayload) + fc.Result = res + return ec.marshalORemoveTagPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveTagPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_removeTag(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "tagId": + return ec.fieldContext_RemoveTagPayload_tagId(ctx, field) + case "updatedLayers": + return ec.fieldContext_RemoveTagPayload_updatedLayers(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type RemoveTagPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_removeTag_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _PageInfo_startCursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_PageInfo_startCursor(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.StartCursor, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*usecase.Cursor) + fc.Result = res + return ec.marshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PageInfo_startCursor(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PageInfo", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Cursor does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PageInfo_endCursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PageInfo_endCursor(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.EndCursor, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*usecase.Cursor) + fc.Result = res + return 
ec.marshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PageInfo_endCursor(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PageInfo", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Cursor does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PageInfo_hasNextPage(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PageInfo_hasNextPage(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.HasNextPage, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PageInfo_hasNextPage(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PageInfo", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) 
_PageInfo_hasPreviousPage(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PageInfo_hasPreviousPage(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.HasPreviousPage, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PageInfo_hasPreviousPage(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PageInfo", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Plugin_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } 
+ if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Plugin_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Plugin_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_sceneId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Plugin_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Plugin_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Plugin_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Plugin_version(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_version(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context 
from middleware stack in children + return obj.Version, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Plugin_version(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Plugin_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Plugin_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: false, + IsResolver: false, + Child: 
func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Plugin_author(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_author(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Author, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Plugin_author(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Plugin_repositoryUrl(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_repositoryUrl(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, 
func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.RepositoryURL, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Plugin_repositoryUrl(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Plugin_propertySchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_propertySchemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertySchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Plugin_propertySchemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc 
= &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Plugin_extensions(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_extensions(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Extensions, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.PluginExtension) + fc.Result = res + return ec.marshalNPluginExtension2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Plugin_extensions(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "extensionId": + return ec.fieldContext_PluginExtension_extensionId(ctx, field) + case "pluginId": + return ec.fieldContext_PluginExtension_pluginId(ctx, field) + case "type": + return ec.fieldContext_PluginExtension_type(ctx, field) + case "name": + return 
ec.fieldContext_PluginExtension_name(ctx, field) + case "description": + return ec.fieldContext_PluginExtension_description(ctx, field) + case "icon": + return ec.fieldContext_PluginExtension_icon(ctx, field) + case "singleOnly": + return ec.fieldContext_PluginExtension_singleOnly(ctx, field) + case "widgetLayout": + return ec.fieldContext_PluginExtension_widgetLayout(ctx, field) + case "visualizer": + return ec.fieldContext_PluginExtension_visualizer(ctx, field) + case "propertySchemaId": + return ec.fieldContext_PluginExtension_propertySchemaId(ctx, field) + case "allTranslatedName": + return ec.fieldContext_PluginExtension_allTranslatedName(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PluginExtension_allTranslatedDescription(ctx, field) + case "plugin": + return ec.fieldContext_PluginExtension_plugin(ctx, field) + case "sceneWidget": + return ec.fieldContext_PluginExtension_sceneWidget(ctx, field) + case "propertySchema": + return ec.fieldContext_PluginExtension_propertySchema(ctx, field) + case "translatedName": + return ec.fieldContext_PluginExtension_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_PluginExtension_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PluginExtension", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Plugin_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_scenePlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ScenePlugin, nil + }) + if err != nil { + ec.Error(ctx, 
err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ScenePlugin) + fc.Result = res + return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Plugin_scenePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Plugin_scenePlugin_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Plugin_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
obj.AllTranslatedDescription, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Plugin_allTranslatedDescription(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TranslatedString does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Plugin_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_allTranslatedName(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AllTranslatedName, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Plugin_allTranslatedName(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, 
errors.New("field of type TranslatedString does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Plugin_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Plugin().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Plugin_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return 
ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Plugin_translatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_translatedName(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Plugin().TranslatedName(rctx, obj, fc.Args["lang"].(*language.Tag)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Plugin_translatedName(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Plugin_translatedName_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Plugin_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_translatedDescription(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Plugin().TranslatedDescription(rctx, obj, fc.Args["lang"].(*language.Tag)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Plugin_translatedDescription(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return 
nil, errors.New("field of type String does not have child fields") + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Plugin_translatedDescription_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Plugin_propertySchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_propertySchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Plugin().PropertySchema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Plugin_propertySchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return 
nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_extensionId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ExtensionID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PluginExtension_extensionId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_pluginId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, 
r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PluginExtension_pluginId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_type(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.PluginExtensionType) + fc.Result = res + return 
ec.marshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PluginExtension_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type PluginExtensionType does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PluginExtension_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child 
fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PluginExtension_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_icon(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_icon(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context 
from middleware stack in children + return obj.Icon, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PluginExtension_icon(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_singleOnly(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_singleOnly(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SingleOnly, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*bool) + fc.Result = res + return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PluginExtension_singleOnly(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, 
field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_widgetLayout(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_widgetLayout(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.WidgetLayout, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetLayout) + fc.Result = res + return ec.marshalOWidgetLayout2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLayout(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PluginExtension_widgetLayout(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "extendable": + return ec.fieldContext_WidgetLayout_extendable(ctx, field) + case "extended": + return ec.fieldContext_WidgetLayout_extended(ctx, field) + case "floating": + return ec.fieldContext_WidgetLayout_floating(ctx, field) + case "defaultLocation": + return ec.fieldContext_WidgetLayout_defaultLocation(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetLayout", field.Name) + }, + } + return fc, nil +} + +func 
(ec *executionContext) _PluginExtension_visualizer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_visualizer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Visualizer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Visualizer) + fc.Result = res + return ec.marshalOVisualizer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PluginExtension_visualizer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Visualizer does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_propertySchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_propertySchemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use 
context from middleware stack in children + return obj.PropertySchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PluginExtension_propertySchemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_allTranslatedName(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AllTranslatedName, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PluginExtension_allTranslatedName(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { 
+ fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TranslatedString does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_allTranslatedDescription(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AllTranslatedDescription, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PluginExtension_allTranslatedDescription(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TranslatedString does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_plugin(ctx, field) + if err != nil { + return 
graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PluginExtension().Plugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Plugin) + fc.Result = res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PluginExtension_plugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return 
ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_sceneWidget(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_sceneWidget(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PluginExtension().SceneWidget(rctx, obj, fc.Args["sceneId"].(gqlmodel.ID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.SceneWidget) + fc.Result = res + return ec.marshalOSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidget(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PluginExtension_sceneWidget(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_SceneWidget_id(ctx, field) + case 
"pluginId": + return ec.fieldContext_SceneWidget_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_SceneWidget_extensionId(ctx, field) + case "propertyId": + return ec.fieldContext_SceneWidget_propertyId(ctx, field) + case "enabled": + return ec.fieldContext_SceneWidget_enabled(ctx, field) + case "extended": + return ec.fieldContext_SceneWidget_extended(ctx, field) + case "plugin": + return ec.fieldContext_SceneWidget_plugin(ctx, field) + case "extension": + return ec.fieldContext_SceneWidget_extension(ctx, field) + case "property": + return ec.fieldContext_SceneWidget_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type SceneWidget", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_PluginExtension_sceneWidget_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_propertySchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_propertySchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PluginExtension().PropertySchema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchema) + fc.Result = res + return 
ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PluginExtension_propertySchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_translatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_translatedName(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PluginExtension().TranslatedName(rctx, obj, fc.Args["lang"].(*language.Tag)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) 
fieldContext_PluginExtension_translatedName(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_PluginExtension_translatedName_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_translatedDescription(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PluginExtension().TranslatedDescription(rctx, obj, fc.Args["lang"].(*language.Tag)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PluginExtension_translatedDescription(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: 
"PluginExtension", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_PluginExtension_translatedDescription_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Project_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + 
+func (ec *executionContext) _Project_isArchived(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_isArchived(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsArchived, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_isArchived(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_isBasicAuthActive(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_isBasicAuthActive(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsBasicAuthActive, nil + }) + if err != 
nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_isBasicAuthActive(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_basicAuthUsername(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_basicAuthUsername(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.BasicAuthUsername, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_basicAuthUsername(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_basicAuthPassword(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_basicAuthPassword(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.BasicAuthPassword, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_basicAuthPassword(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_createdAt(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.CreatedAt, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(time.Time) + fc.Result = res + return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_createdAt(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type DateTime does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_updatedAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_updatedAt(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.UpdatedAt, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(time.Time) + fc.Result = res + return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_updatedAt(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, 
err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type DateTime does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_publishedAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_publishedAt(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PublishedAt, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*time.Time) + fc.Result = res + return ec.marshalODateTime2แš–timeแšTime(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_publishedAt(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type DateTime does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, 
r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_description(ctx context.Context, field graphql.CollectedField) 
(fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_alias(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_alias(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Alias, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_alias(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_publicTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_publicTitle(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + 
defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PublicTitle, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_publicTitle(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_publicDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_publicDescription(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PublicDescription, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + 
+func (ec *executionContext) fieldContext_Project_publicDescription(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_publicImage(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_publicImage(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PublicImage, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_publicImage(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_publicNoIndex(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, 
err := ec.fieldContext_Project_publicNoIndex(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PublicNoIndex, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_publicNoIndex(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_imageUrl(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_imageUrl(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ImageURL, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*url.URL) + fc.Result = res + return 
ec.marshalOURL2แš–netแš‹urlแšURL(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_imageUrl(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type URL does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_teamId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TeamID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_teamId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) 
_Project_visualizer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_visualizer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Visualizer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Visualizer) + fc.Result = res + return ec.marshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_visualizer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Visualizer does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_publishmentStatus(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_publishmentStatus(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from 
middleware stack in children + return obj.PublishmentStatus, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.PublishmentStatus) + fc.Result = res + return ec.marshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPublishmentStatus(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_publishmentStatus(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type PublishmentStatus does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Project().Team(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Team) + fc.Result = res + return ec.marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_team(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Project_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Project().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx 
context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _ProjectAliasAvailability_alias(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectAliasAvailability) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectAliasAvailability_alias(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + 
ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Alias, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ProjectAliasAvailability_alias(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ProjectAliasAvailability", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _ProjectAliasAvailability_available(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectAliasAvailability) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectAliasAvailability_available(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Available, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func 
(ec *executionContext) fieldContext_ProjectAliasAvailability_available(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ProjectAliasAvailability", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _ProjectConnection_edges(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectConnection_edges(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Edges, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.ProjectEdge) + fc.Result = res + return ec.marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectEdgeแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ProjectConnection_edges(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ProjectConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "cursor": + return ec.fieldContext_ProjectEdge_cursor(ctx, field) + 
case "node": + return ec.fieldContext_ProjectEdge_node(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ProjectEdge", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _ProjectConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectConnection_nodes(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Nodes, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.Project) + fc.Result = res + return ec.marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ProjectConnection_nodes(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ProjectConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Project_id(ctx, field) + case "isArchived": + return ec.fieldContext_Project_isArchived(ctx, field) + case "isBasicAuthActive": + return ec.fieldContext_Project_isBasicAuthActive(ctx, field) + case "basicAuthUsername": + return ec.fieldContext_Project_basicAuthUsername(ctx, field) + case "basicAuthPassword": + return 
ec.fieldContext_Project_basicAuthPassword(ctx, field) + case "createdAt": + return ec.fieldContext_Project_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Project_updatedAt(ctx, field) + case "publishedAt": + return ec.fieldContext_Project_publishedAt(ctx, field) + case "name": + return ec.fieldContext_Project_name(ctx, field) + case "description": + return ec.fieldContext_Project_description(ctx, field) + case "alias": + return ec.fieldContext_Project_alias(ctx, field) + case "publicTitle": + return ec.fieldContext_Project_publicTitle(ctx, field) + case "publicDescription": + return ec.fieldContext_Project_publicDescription(ctx, field) + case "publicImage": + return ec.fieldContext_Project_publicImage(ctx, field) + case "publicNoIndex": + return ec.fieldContext_Project_publicNoIndex(ctx, field) + case "imageUrl": + return ec.fieldContext_Project_imageUrl(ctx, field) + case "teamId": + return ec.fieldContext_Project_teamId(ctx, field) + case "visualizer": + return ec.fieldContext_Project_visualizer(ctx, field) + case "publishmentStatus": + return ec.fieldContext_Project_publishmentStatus(ctx, field) + case "team": + return ec.fieldContext_Project_team(ctx, field) + case "scene": + return ec.fieldContext_Project_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Project", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _ProjectConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectConnection_pageInfo(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
obj.PageInfo, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.PageInfo) + fc.Result = res + return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ProjectConnection_pageInfo(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ProjectConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "startCursor": + return ec.fieldContext_PageInfo_startCursor(ctx, field) + case "endCursor": + return ec.fieldContext_PageInfo_endCursor(ctx, field) + case "hasNextPage": + return ec.fieldContext_PageInfo_hasNextPage(ctx, field) + case "hasPreviousPage": + return ec.fieldContext_PageInfo_hasPreviousPage(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PageInfo", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _ProjectConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectConnection_totalCount(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TotalCount, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if 
!graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int) + fc.Result = res + return ec.marshalNInt2int(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ProjectConnection_totalCount(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ProjectConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _ProjectEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectEdge_cursor(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Cursor, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(usecase.Cursor) + fc.Result = res + return ec.marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ProjectEdge_cursor(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ProjectEdge", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + return nil, errors.New("field of type Cursor does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _ProjectEdge_node(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectEdge_node(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Node, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Project) + fc.Result = res + return ec.marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ProjectEdge_node(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ProjectEdge", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Project_id(ctx, field) + case "isArchived": + return ec.fieldContext_Project_isArchived(ctx, field) + case "isBasicAuthActive": + return ec.fieldContext_Project_isBasicAuthActive(ctx, field) + case "basicAuthUsername": + return ec.fieldContext_Project_basicAuthUsername(ctx, field) + case "basicAuthPassword": + return ec.fieldContext_Project_basicAuthPassword(ctx, field) + case "createdAt": + return ec.fieldContext_Project_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Project_updatedAt(ctx, field) + 
case "publishedAt": + return ec.fieldContext_Project_publishedAt(ctx, field) + case "name": + return ec.fieldContext_Project_name(ctx, field) + case "description": + return ec.fieldContext_Project_description(ctx, field) + case "alias": + return ec.fieldContext_Project_alias(ctx, field) + case "publicTitle": + return ec.fieldContext_Project_publicTitle(ctx, field) + case "publicDescription": + return ec.fieldContext_Project_publicDescription(ctx, field) + case "publicImage": + return ec.fieldContext_Project_publicImage(ctx, field) + case "publicNoIndex": + return ec.fieldContext_Project_publicNoIndex(ctx, field) + case "imageUrl": + return ec.fieldContext_Project_imageUrl(ctx, field) + case "teamId": + return ec.fieldContext_Project_teamId(ctx, field) + case "visualizer": + return ec.fieldContext_Project_visualizer(ctx, field) + case "publishmentStatus": + return ec.fieldContext_Project_publishmentStatus(ctx, field) + case "team": + return ec.fieldContext_Project_team(ctx, field) + case "scene": + return ec.fieldContext_Project_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Project", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _ProjectPayload_project(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectPayload_project(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Project, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := 
resTmp.(*gqlmodel.Project) + fc.Result = res + return ec.marshalNProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ProjectPayload_project(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ProjectPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Project_id(ctx, field) + case "isArchived": + return ec.fieldContext_Project_isArchived(ctx, field) + case "isBasicAuthActive": + return ec.fieldContext_Project_isBasicAuthActive(ctx, field) + case "basicAuthUsername": + return ec.fieldContext_Project_basicAuthUsername(ctx, field) + case "basicAuthPassword": + return ec.fieldContext_Project_basicAuthPassword(ctx, field) + case "createdAt": + return ec.fieldContext_Project_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Project_updatedAt(ctx, field) + case "publishedAt": + return ec.fieldContext_Project_publishedAt(ctx, field) + case "name": + return ec.fieldContext_Project_name(ctx, field) + case "description": + return ec.fieldContext_Project_description(ctx, field) + case "alias": + return ec.fieldContext_Project_alias(ctx, field) + case "publicTitle": + return ec.fieldContext_Project_publicTitle(ctx, field) + case "publicDescription": + return ec.fieldContext_Project_publicDescription(ctx, field) + case "publicImage": + return ec.fieldContext_Project_publicImage(ctx, field) + case "publicNoIndex": + return ec.fieldContext_Project_publicNoIndex(ctx, field) + case "imageUrl": + return ec.fieldContext_Project_imageUrl(ctx, field) + case "teamId": + return ec.fieldContext_Project_teamId(ctx, field) + case "visualizer": + return ec.fieldContext_Project_visualizer(ctx, field) + case 
"publishmentStatus": + return ec.fieldContext_Project_publishmentStatus(ctx, field) + case "team": + return ec.fieldContext_Project_team(ctx, field) + case "scene": + return ec.fieldContext_Project_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Project", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Property_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Property_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Property_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Property", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Property_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Property_schemaId(ctx, field) + if err != nil { + return 
graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Property_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Property", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Property_items(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Property_items(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Items, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := 
resTmp.([]gqlmodel.PropertyItem) + fc.Result = res + return ec.marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Property_items(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Property", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type PropertyItem does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Property_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Property_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Property().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Property_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Property", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { 
+ switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Property_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Property_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Property().Layer(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(gqlmodel.Layer) + fc.Result = res + return ec.marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Property_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Property", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _Property_merged(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Property_merged(ctx, field) + if err != nil { 
+ return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Property().Merged(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.MergedProperty) + fc.Result = res + return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Property_merged(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Property", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalId": + return ec.fieldContext_MergedProperty_originalId(ctx, field) + case "parentId": + return ec.fieldContext_MergedProperty_parentId(ctx, field) + case "schemaId": + return ec.fieldContext_MergedProperty_schemaId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_MergedProperty_linkedDatasetId(ctx, field) + case "original": + return ec.fieldContext_MergedProperty_original(ctx, field) + case "parent": + return ec.fieldContext_MergedProperty_parent(ctx, field) + case "schema": + return ec.fieldContext_MergedProperty_schema(ctx, field) + case "linkedDataset": + return ec.fieldContext_MergedProperty_linkedDataset(ctx, field) + case "groups": + return ec.fieldContext_MergedProperty_groups(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedProperty", field.Name) + }, + } + return fc, nil +} + +func (ec 
*executionContext) _PropertyCondition_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyCondition) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyCondition_fieldId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyCondition_fieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyCondition", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyCondition_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyCondition) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyCondition_type(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx 
= rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ValueType) + fc.Result = res + return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyCondition_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyCondition", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ValueType does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyCondition_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyCondition) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyCondition_value(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Value, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(interface{}) + fc.Result = res + return ec.marshalOAny2interface(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyCondition_value(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + 
Object: "PropertyCondition", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Any does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyField_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyField_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyField_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_parentId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); 
r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyField_parentId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_schemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return 
ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyField_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyField_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_fieldId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyField_fieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of 
type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyField_links(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_links(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Links, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]*gqlmodel.PropertyFieldLink) + fc.Result = res + return ec.marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldLinkแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyField_links(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "datasetId": + return ec.fieldContext_PropertyFieldLink_datasetId(ctx, field) + case "datasetSchemaId": + return ec.fieldContext_PropertyFieldLink_datasetSchemaId(ctx, field) + case "datasetSchemaFieldId": + return ec.fieldContext_PropertyFieldLink_datasetSchemaFieldId(ctx, field) + case "dataset": + return ec.fieldContext_PropertyFieldLink_dataset(ctx, field) + case "datasetField": + return ec.fieldContext_PropertyFieldLink_datasetField(ctx, field) + case "datasetSchema": + return ec.fieldContext_PropertyFieldLink_datasetSchema(ctx, field) + case "datasetSchemaField": 
+ return ec.fieldContext_PropertyFieldLink_datasetSchemaField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyFieldLink", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_type(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ValueType) + fc.Result = res + return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyField_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ValueType does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyField_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_value(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer 
func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Value, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(interface{}) + fc.Result = res + return ec.marshalOAny2interface(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyField_value(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Any does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyField_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_parent(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyField().Parent(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) 
fieldContext_PropertyField_parent(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyField_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyField().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyField_schema(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyField_field(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_field(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyField().Field(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchemaField) + fc.Result = res + return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyField_field(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch 
field.Name { + case "fieldId": + return ec.fieldContext_PropertySchemaField_fieldId(ctx, field) + case "type": + return ec.fieldContext_PropertySchemaField_type(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaField_title(ctx, field) + case "description": + return ec.fieldContext_PropertySchemaField_description(ctx, field) + case "prefix": + return ec.fieldContext_PropertySchemaField_prefix(ctx, field) + case "suffix": + return ec.fieldContext_PropertySchemaField_suffix(ctx, field) + case "defaultValue": + return ec.fieldContext_PropertySchemaField_defaultValue(ctx, field) + case "ui": + return ec.fieldContext_PropertySchemaField_ui(ctx, field) + case "min": + return ec.fieldContext_PropertySchemaField_min(ctx, field) + case "max": + return ec.fieldContext_PropertySchemaField_max(ctx, field) + case "choices": + return ec.fieldContext_PropertySchemaField_choices(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaField_isAvailableIf(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaField_allTranslatedTitle(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PropertySchemaField_allTranslatedDescription(ctx, field) + case "translatedTitle": + return ec.fieldContext_PropertySchemaField_translatedTitle(ctx, field) + case "translatedDescription": + return ec.fieldContext_PropertySchemaField_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyField_actualValue(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_actualValue(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + 
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyField().ActualValue(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(interface{}) + fc.Result = res + return ec.marshalOAny2interface(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyField_actualValue(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Any does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyFieldLink_datasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldLink_datasetId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DatasetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyFieldLink_datasetId(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyFieldLink_datasetSchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldLink_datasetSchemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DatasetSchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyFieldLink_datasetSchemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyFieldLink_datasetSchemaFieldId(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldLink_datasetSchemaFieldId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DatasetSchemaFieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyFieldLink_datasetSchemaFieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyFieldLink_dataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldLink_dataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children 
+ return ec.resolvers.PropertyFieldLink().Dataset(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyFieldLink_dataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyFieldLink_datasetField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldLink_datasetField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyFieldLink().DatasetField(rctx, obj) + }) + if err != nil { 
+ ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetField) + fc.Result = res + return ec.marshalODatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetField(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyFieldLink_datasetField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_DatasetField_fieldId(ctx, field) + case "schemaId": + return ec.fieldContext_DatasetField_schemaId(ctx, field) + case "source": + return ec.fieldContext_DatasetField_source(ctx, field) + case "type": + return ec.fieldContext_DatasetField_type(ctx, field) + case "value": + return ec.fieldContext_DatasetField_value(ctx, field) + case "schema": + return ec.fieldContext_DatasetField_schema(ctx, field) + case "field": + return ec.fieldContext_DatasetField_field(ctx, field) + case "valueRef": + return ec.fieldContext_DatasetField_valueRef(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyFieldLink_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldLink_datasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use 
context from middleware stack in children + return ec.resolvers.PropertyFieldLink().DatasetSchema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyFieldLink_datasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyFieldLink_datasetSchemaField(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldLink_datasetSchemaField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyFieldLink().DatasetSchemaField(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchemaField) + fc.Result = res + return ec.marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaField(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyFieldLink_datasetSchemaField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchemaField_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchemaField_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchemaField_name(ctx, field) + case "type": + return ec.fieldContext_DatasetSchemaField_type(ctx, field) + case "schemaId": + return ec.fieldContext_DatasetSchemaField_schemaId(ctx, field) + case "refId": + return ec.fieldContext_DatasetSchemaField_refId(ctx, field) + case "schema": + return ec.fieldContext_DatasetSchemaField_schema(ctx, field) + case "ref": + return ec.fieldContext_DatasetSchemaField_ref(ctx, field) + } + return nil, fmt.Errorf("no field named %q was 
found under type DatasetSchemaField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyFieldPayload_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldPayload_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Property, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Property) + fc.Result = res + return ec.marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyFieldPayload_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyFieldPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under 
type Property", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyFieldPayload_propertyField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldPayload_propertyField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyField, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertyField) + fc.Result = res + return ec.marshalOPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyField(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyFieldPayload_propertyField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyFieldPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertyField_id(ctx, field) + case "parentId": + return ec.fieldContext_PropertyField_parentId(ctx, field) + case "schemaId": + return ec.fieldContext_PropertyField_schemaId(ctx, field) + case "fieldId": + return ec.fieldContext_PropertyField_fieldId(ctx, field) + case "links": + return ec.fieldContext_PropertyField_links(ctx, field) + case "type": + return ec.fieldContext_PropertyField_type(ctx, field) + case "value": + return ec.fieldContext_PropertyField_value(ctx, field) + case "parent": + return 
ec.fieldContext_PropertyField_parent(ctx, field) + case "schema": + return ec.fieldContext_PropertyField_schema(ctx, field) + case "field": + return ec.fieldContext_PropertyField_field(ctx, field) + case "actualValue": + return ec.fieldContext_PropertyField_actualValue(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyGroup_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroup_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyGroup_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret 
graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroup_schemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroup_schemaGroupId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaGroupID, nil + }) + if err != nil { + ec.Error(ctx, err) + 
return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyGroup_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroup_fields(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.PropertyField) + fc.Result = res + return ec.marshalNPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyGroup_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err 
error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertyField_id(ctx, field) + case "parentId": + return ec.fieldContext_PropertyField_parentId(ctx, field) + case "schemaId": + return ec.fieldContext_PropertyField_schemaId(ctx, field) + case "fieldId": + return ec.fieldContext_PropertyField_fieldId(ctx, field) + case "links": + return ec.fieldContext_PropertyField_links(ctx, field) + case "type": + return ec.fieldContext_PropertyField_type(ctx, field) + case "value": + return ec.fieldContext_PropertyField_value(ctx, field) + case "parent": + return ec.fieldContext_PropertyField_parent(ctx, field) + case "schema": + return ec.fieldContext_PropertyField_schema(ctx, field) + case "field": + return ec.fieldContext_PropertyField_field(ctx, field) + case "actualValue": + return ec.fieldContext_PropertyField_actualValue(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyGroup_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroup_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyGroup().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchema) + 
fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyGroup_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyGroup_schemaGroup(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroup_schemaGroup(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyGroup().SchemaGroup(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchemaGroup) + fc.Result = res + return ec.marshalOPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) 
fieldContext_PropertyGroup_schemaGroup(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "schemaGroupId": + return ec.fieldContext_PropertySchemaGroup_schemaGroupId(ctx, field) + case "schemaId": + return ec.fieldContext_PropertySchemaGroup_schemaId(ctx, field) + case "fields": + return ec.fieldContext_PropertySchemaGroup_fields(ctx, field) + case "isList": + return ec.fieldContext_PropertySchemaGroup_isList(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaGroup_isAvailableIf(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaGroup_title(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaGroup_allTranslatedTitle(ctx, field) + case "representativeFieldId": + return ec.fieldContext_PropertySchemaGroup_representativeFieldId(ctx, field) + case "representativeField": + return ec.fieldContext_PropertySchemaGroup_representativeField(ctx, field) + case "schema": + return ec.fieldContext_PropertySchemaGroup_schema(ctx, field) + case "translatedTitle": + return ec.fieldContext_PropertySchemaGroup_translatedTitle(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyGroupList_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroupList_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyGroupList_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroupList", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyGroupList_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroupList_schemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec 
*executionContext) fieldContext_PropertyGroupList_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroupList", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyGroupList_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroupList_schemaGroupId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaGroupID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyGroupList_schemaGroupId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroupList", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec 
*executionContext) _PropertyGroupList_groups(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroupList_groups(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Groups, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.PropertyGroup) + fc.Result = res + return ec.marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroupแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyGroupList_groups(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroupList", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertyGroup_id(ctx, field) + case "schemaId": + return ec.fieldContext_PropertyGroup_schemaId(ctx, field) + case "schemaGroupId": + return ec.fieldContext_PropertyGroup_schemaGroupId(ctx, field) + case "fields": + return ec.fieldContext_PropertyGroup_fields(ctx, field) + case "schema": + return ec.fieldContext_PropertyGroup_schema(ctx, field) + case "schemaGroup": + return ec.fieldContext_PropertyGroup_schemaGroup(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyGroup", field.Name) + 
}, + } + return fc, nil +} + +func (ec *executionContext) _PropertyGroupList_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroupList_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyGroupList().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyGroupList_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroupList", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyGroupList_schemaGroup(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_PropertyGroupList_schemaGroup(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyGroupList().SchemaGroup(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchemaGroup) + fc.Result = res + return ec.marshalOPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyGroupList_schemaGroup(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroupList", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "schemaGroupId": + return ec.fieldContext_PropertySchemaGroup_schemaGroupId(ctx, field) + case "schemaId": + return ec.fieldContext_PropertySchemaGroup_schemaId(ctx, field) + case "fields": + return ec.fieldContext_PropertySchemaGroup_fields(ctx, field) + case "isList": + return ec.fieldContext_PropertySchemaGroup_isList(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaGroup_isAvailableIf(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaGroup_title(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaGroup_allTranslatedTitle(ctx, field) + case "representativeFieldId": + return ec.fieldContext_PropertySchemaGroup_representativeFieldId(ctx, field) + case 
"representativeField": + return ec.fieldContext_PropertySchemaGroup_representativeField(ctx, field) + case "schema": + return ec.fieldContext_PropertySchemaGroup_schema(ctx, field) + case "translatedTitle": + return ec.fieldContext_PropertySchemaGroup_translatedTitle(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyItemPayload_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyItemPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyItemPayload_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Property, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Property) + fc.Result = res + return ec.marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyItemPayload_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyItemPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case 
"items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyItemPayload_propertyItem(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyItemPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyItemPayload_propertyItem(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyItem, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(gqlmodel.PropertyItem) + fc.Result = res + return ec.marshalOPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItem(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyItemPayload_propertyItem(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyItemPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type PropertyItem does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyLinkableFields_schemaId(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyLinkableFields_schemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyLinkableFields_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyLinkableFields", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyLinkableFields_latlng(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyLinkableFields_latlng(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + 
return obj.Latlng, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyLinkableFields_latlng(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyLinkableFields", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyLinkableFields_url(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyLinkableFields_url(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.URL, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyLinkableFields_url(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyLinkableFields", + Field: field, + 
IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyLinkableFields_latlngField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyLinkableFields_latlngField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyLinkableFields().LatlngField(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchemaField) + fc.Result = res + return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyLinkableFields_latlngField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyLinkableFields", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_PropertySchemaField_fieldId(ctx, field) + case "type": + return ec.fieldContext_PropertySchemaField_type(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaField_title(ctx, field) + case "description": + return 
ec.fieldContext_PropertySchemaField_description(ctx, field) + case "prefix": + return ec.fieldContext_PropertySchemaField_prefix(ctx, field) + case "suffix": + return ec.fieldContext_PropertySchemaField_suffix(ctx, field) + case "defaultValue": + return ec.fieldContext_PropertySchemaField_defaultValue(ctx, field) + case "ui": + return ec.fieldContext_PropertySchemaField_ui(ctx, field) + case "min": + return ec.fieldContext_PropertySchemaField_min(ctx, field) + case "max": + return ec.fieldContext_PropertySchemaField_max(ctx, field) + case "choices": + return ec.fieldContext_PropertySchemaField_choices(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaField_isAvailableIf(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaField_allTranslatedTitle(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PropertySchemaField_allTranslatedDescription(ctx, field) + case "translatedTitle": + return ec.fieldContext_PropertySchemaField_translatedTitle(ctx, field) + case "translatedDescription": + return ec.fieldContext_PropertySchemaField_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyLinkableFields_urlField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyLinkableFields_urlField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyLinkableFields().URLField(rctx, obj) + }) + if err != nil { + ec.Error(ctx, 
err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchemaField) + fc.Result = res + return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyLinkableFields_urlField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyLinkableFields", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_PropertySchemaField_fieldId(ctx, field) + case "type": + return ec.fieldContext_PropertySchemaField_type(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaField_title(ctx, field) + case "description": + return ec.fieldContext_PropertySchemaField_description(ctx, field) + case "prefix": + return ec.fieldContext_PropertySchemaField_prefix(ctx, field) + case "suffix": + return ec.fieldContext_PropertySchemaField_suffix(ctx, field) + case "defaultValue": + return ec.fieldContext_PropertySchemaField_defaultValue(ctx, field) + case "ui": + return ec.fieldContext_PropertySchemaField_ui(ctx, field) + case "min": + return ec.fieldContext_PropertySchemaField_min(ctx, field) + case "max": + return ec.fieldContext_PropertySchemaField_max(ctx, field) + case "choices": + return ec.fieldContext_PropertySchemaField_choices(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaField_isAvailableIf(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaField_allTranslatedTitle(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PropertySchemaField_allTranslatedDescription(ctx, field) + case "translatedTitle": + return 
ec.fieldContext_PropertySchemaField_translatedTitle(ctx, field) + case "translatedDescription": + return ec.fieldContext_PropertySchemaField_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertyLinkableFields_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyLinkableFields_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyLinkableFields().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyLinkableFields_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyLinkableFields", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no 
field named %q was found under type PropertySchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchema_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchema_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchema_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchema", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchema_groups(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchema_groups(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Groups, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.PropertySchemaGroup) + fc.Result = res + return ec.marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroupแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchema_groups(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchema", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "schemaGroupId": + return ec.fieldContext_PropertySchemaGroup_schemaGroupId(ctx, field) + case "schemaId": + return ec.fieldContext_PropertySchemaGroup_schemaId(ctx, field) + case "fields": + return ec.fieldContext_PropertySchemaGroup_fields(ctx, field) + case "isList": + return ec.fieldContext_PropertySchemaGroup_isList(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaGroup_isAvailableIf(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaGroup_title(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaGroup_allTranslatedTitle(ctx, field) + case "representativeFieldId": + return ec.fieldContext_PropertySchemaGroup_representativeFieldId(ctx, field) + case "representativeField": + return ec.fieldContext_PropertySchemaGroup_representativeField(ctx, field) + case "schema": + return ec.fieldContext_PropertySchemaGroup_schema(ctx, field) + case "translatedTitle": + return 
ec.fieldContext_PropertySchemaGroup_translatedTitle(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchema_linkableFields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchema_linkableFields(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LinkableFields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertyLinkableFields) + fc.Result = res + return ec.marshalNPropertyLinkableFields2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyLinkableFields(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchema_linkableFields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchema", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "schemaId": + return ec.fieldContext_PropertyLinkableFields_schemaId(ctx, field) + case "latlng": + return ec.fieldContext_PropertyLinkableFields_latlng(ctx, field) + case "url": + return ec.fieldContext_PropertyLinkableFields_url(ctx, field) + case "latlngField": + return 
ec.fieldContext_PropertyLinkableFields_latlngField(ctx, field) + case "urlField": + return ec.fieldContext_PropertyLinkableFields_urlField(ctx, field) + case "schema": + return ec.fieldContext_PropertyLinkableFields_schema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyLinkableFields", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_fieldId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaField_fieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_type(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_type(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ValueType) + fc.Result = res + return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaField_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ValueType does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_title(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_title(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
obj.Title, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaField_title(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaField_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + 
IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_prefix(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_prefix(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Prefix, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaField_prefix(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_suffix(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_suffix(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Suffix, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaField_suffix(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_defaultValue(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_defaultValue(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DefaultValue, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(interface{}) + fc.Result = res + return ec.marshalOAny2interface(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaField_defaultValue(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: 
"PropertySchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Any does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_ui(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_ui(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.UI, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchemaFieldUI) + fc.Result = res + return ec.marshalOPropertySchemaFieldUI2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldUI(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaField_ui(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type PropertySchemaFieldUI does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_min(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_min(ctx, field) + if err != nil 
{ + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Min, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*float64) + fc.Result = res + return ec.marshalOFloat2แš–float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaField_min(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_max(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_max(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Max, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*float64) + fc.Result = res + return ec.marshalOFloat2แš–float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaField_max(ctx 
context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_choices(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_choices(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Choices, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]*gqlmodel.PropertySchemaFieldChoice) + fc.Result = res + return ec.marshalOPropertySchemaFieldChoice2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldChoiceแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaField_choices(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "key": + return ec.fieldContext_PropertySchemaFieldChoice_key(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaFieldChoice_title(ctx, field) + case 
"icon": + return ec.fieldContext_PropertySchemaFieldChoice_icon(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaFieldChoice_allTranslatedTitle(ctx, field) + case "translatedTitle": + return ec.fieldContext_PropertySchemaFieldChoice_translatedTitle(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaFieldChoice", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_isAvailableIf(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_isAvailableIf(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsAvailableIf, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertyCondition) + fc.Result = res + return ec.marshalOPropertyCondition2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyCondition(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaField_isAvailableIf(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_PropertyCondition_fieldId(ctx, field) + case "type": + return ec.fieldContext_PropertyCondition_type(ctx, field) + case 
"value": + return ec.fieldContext_PropertyCondition_value(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyCondition", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_allTranslatedTitle(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AllTranslatedTitle, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaField_allTranslatedTitle(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TranslatedString does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_allTranslatedDescription(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := 
recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AllTranslatedDescription, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaField_allTranslatedDescription(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TranslatedString does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_translatedTitle(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertySchemaField().TranslatedTitle(rctx, obj, fc.Args["lang"].(*language.Tag)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = 
res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaField_translatedTitle(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_PropertySchemaField_translatedTitle_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_translatedDescription(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertySchemaField().TranslatedDescription(rctx, obj, fc.Args["lang"].(*language.Tag)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaField_translatedDescription(ctx 
context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_PropertySchemaField_translatedDescription_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaFieldChoice_key(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaFieldChoice_key(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Key, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaFieldChoice_key(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaFieldChoice", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaFieldChoice_title(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaFieldChoice_title(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Title, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaFieldChoice_title(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaFieldChoice", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaFieldChoice_icon(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaFieldChoice_icon(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + 
ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Icon, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaFieldChoice_icon(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaFieldChoice", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaFieldChoice_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaFieldChoice_allTranslatedTitle(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AllTranslatedTitle, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaFieldChoice_allTranslatedTitle(ctx context.Context, 
field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaFieldChoice", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TranslatedString does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaFieldChoice_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaFieldChoice_translatedTitle(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertySchemaFieldChoice().TranslatedTitle(rctx, obj, fc.Args["lang"].(*language.Tag)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaFieldChoice_translatedTitle(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaFieldChoice", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + defer func() { + if r := recover(); r != nil { + err = 
ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_PropertySchemaFieldChoice_translatedTitle_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_schemaGroupId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaGroupID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_PropertySchemaGroup_schemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaGroup_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaGroup_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_fields(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + 
} + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.PropertySchemaField) + fc.Result = res + return ec.marshalNPropertySchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaGroup_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_PropertySchemaField_fieldId(ctx, field) + case "type": + return ec.fieldContext_PropertySchemaField_type(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaField_title(ctx, field) + case "description": + return ec.fieldContext_PropertySchemaField_description(ctx, field) + case "prefix": + return ec.fieldContext_PropertySchemaField_prefix(ctx, field) + case "suffix": + return ec.fieldContext_PropertySchemaField_suffix(ctx, field) + case "defaultValue": + return ec.fieldContext_PropertySchemaField_defaultValue(ctx, field) + case "ui": + return ec.fieldContext_PropertySchemaField_ui(ctx, field) + case "min": + return ec.fieldContext_PropertySchemaField_min(ctx, field) + case "max": + return ec.fieldContext_PropertySchemaField_max(ctx, field) + case "choices": + return ec.fieldContext_PropertySchemaField_choices(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaField_isAvailableIf(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaField_allTranslatedTitle(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PropertySchemaField_allTranslatedDescription(ctx, field) + case 
"translatedTitle": + return ec.fieldContext_PropertySchemaField_translatedTitle(ctx, field) + case "translatedDescription": + return ec.fieldContext_PropertySchemaField_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaGroup_isList(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_isList(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsList, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaGroup_isList(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaGroup_isAvailableIf(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_isAvailableIf(ctx, field) + 
if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsAvailableIf, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertyCondition) + fc.Result = res + return ec.marshalOPropertyCondition2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyCondition(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaGroup_isAvailableIf(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_PropertyCondition_fieldId(ctx, field) + case "type": + return ec.fieldContext_PropertyCondition_type(ctx, field) + case "value": + return ec.fieldContext_PropertyCondition_value(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyCondition", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaGroup_title(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_title(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, 
func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Title, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaGroup_title(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaGroup_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_allTranslatedTitle(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AllTranslatedTitle, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaGroup_allTranslatedTitle(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: 
field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TranslatedString does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaGroup_representativeFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_representativeFieldId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.RepresentativeFieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaGroup_representativeFieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaGroup_representativeField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_representativeField(ctx, field) + 
if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.RepresentativeField, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchemaField) + fc.Result = res + return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaGroup_representativeField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_PropertySchemaField_fieldId(ctx, field) + case "type": + return ec.fieldContext_PropertySchemaField_type(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaField_title(ctx, field) + case "description": + return ec.fieldContext_PropertySchemaField_description(ctx, field) + case "prefix": + return ec.fieldContext_PropertySchemaField_prefix(ctx, field) + case "suffix": + return ec.fieldContext_PropertySchemaField_suffix(ctx, field) + case "defaultValue": + return ec.fieldContext_PropertySchemaField_defaultValue(ctx, field) + case "ui": + return ec.fieldContext_PropertySchemaField_ui(ctx, field) + case "min": + return ec.fieldContext_PropertySchemaField_min(ctx, field) + case "max": + return ec.fieldContext_PropertySchemaField_max(ctx, field) + case "choices": + 
return ec.fieldContext_PropertySchemaField_choices(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaField_isAvailableIf(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaField_allTranslatedTitle(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PropertySchemaField_allTranslatedDescription(ctx, field) + case "translatedTitle": + return ec.fieldContext_PropertySchemaField_translatedTitle(ctx, field) + case "translatedDescription": + return ec.fieldContext_PropertySchemaField_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaGroup_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertySchemaGroup().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaGroup_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + IsMethod: true, + IsResolver: 
true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _PropertySchemaGroup_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_translatedTitle(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertySchemaGroup().TranslatedTitle(rctx, obj, fc.Args["lang"].(*language.Tag)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertySchemaGroup_translatedTitle(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + defer func() 
{ + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_PropertySchemaGroup_translatedTitle_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_me(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_me(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Me(rctx) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Me) + fc.Result = res + return ec.marshalOMe2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMe(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_me(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Me_id(ctx, field) + case "name": + return ec.fieldContext_Me_name(ctx, field) + case "email": + return ec.fieldContext_Me_email(ctx, field) + case "lang": + return ec.fieldContext_Me_lang(ctx, field) + case "theme": + return ec.fieldContext_Me_theme(ctx, field) + case "myTeamId": + return ec.fieldContext_Me_myTeamId(ctx, field) + case "auths": + return ec.fieldContext_Me_auths(ctx, field) + case "teams": + 
return ec.fieldContext_Me_teams(ctx, field) + case "myTeam": + return ec.fieldContext_Me_myTeam(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Me", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Query_node(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_node(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Node(rctx, fc.Args["id"].(gqlmodel.ID), fc.Args["type"].(gqlmodel.NodeType)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(gqlmodel.Node) + fc.Result = res + return ec.marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNode(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_node(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_node_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_nodes(ctx context.Context, field 
graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_nodes(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Nodes(rctx, fc.Args["id"].([]gqlmodel.ID), fc.Args["type"].(gqlmodel.NodeType)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.Node) + fc.Result = res + return ec.marshalNNode2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNode(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_nodes(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_nodes_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_propertySchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_propertySchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() 
{ + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().PropertySchema(rctx, fc.Args["id"].(gqlmodel.ID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_propertySchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_propertySchema_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_propertySchemas(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_propertySchemas(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + 
defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().PropertySchemas(rctx, fc.Args["id"].([]gqlmodel.ID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.PropertySchema) + fc.Result = res + return ec.marshalNPropertySchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_propertySchemas(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_propertySchemas_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_plugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_plugin(ctx, field) + 
if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Plugin(rctx, fc.Args["id"].(gqlmodel.ID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Plugin) + fc.Result = res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_plugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return 
ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_plugin_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_plugins(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_plugins(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Plugins(rctx, fc.Args["id"].([]gqlmodel.ID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.Plugin) + fc.Result = res + return ec.marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_plugins(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: 
"Query", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_plugins_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_layer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_Query_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Layer(rctx, fc.Args["id"].(gqlmodel.ID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(gqlmodel.Layer) + fc.Result = res + return ec.marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_layer_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_scene(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx 
= rctx // use context from middleware stack in children + return ec.resolvers.Query().Scene(rctx, fc.Args["projectId"].(gqlmodel.ID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, 
field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_scene_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_assets(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_assets(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Assets(rctx, fc.Args["teamId"].(gqlmodel.ID), fc.Args["keyword"].(*string), fc.Args["sort"].(*gqlmodel.AssetSortType), fc.Args["pagination"].(*gqlmodel.Pagination)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.AssetConnection) + fc.Result = res + return ec.marshalNAssetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_assets(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + 
IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "edges": + return ec.fieldContext_AssetConnection_edges(ctx, field) + case "nodes": + return ec.fieldContext_AssetConnection_nodes(ctx, field) + case "pageInfo": + return ec.fieldContext_AssetConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_AssetConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AssetConnection", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_assets_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_projects(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_projects(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Projects(rctx, fc.Args["teamId"].(gqlmodel.ID), fc.Args["includeArchived"].(*bool), fc.Args["first"].(*int), fc.Args["last"].(*int), fc.Args["after"].(*usecase.Cursor), fc.Args["before"].(*usecase.Cursor)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.ProjectConnection) + fc.Result = res + return 
ec.marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_projects(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "edges": + return ec.fieldContext_ProjectConnection_edges(ctx, field) + case "nodes": + return ec.fieldContext_ProjectConnection_nodes(ctx, field) + case "pageInfo": + return ec.fieldContext_ProjectConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_ProjectConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ProjectConnection", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_projects_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_datasetSchemas(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_datasetSchemas(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().DatasetSchemas(rctx, fc.Args["sceneId"].(gqlmodel.ID), fc.Args["first"].(*int), fc.Args["last"].(*int), fc.Args["after"].(*usecase.Cursor), 
fc.Args["before"].(*usecase.Cursor)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchemaConnection) + fc.Result = res + return ec.marshalNDatasetSchemaConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_datasetSchemas(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "edges": + return ec.fieldContext_DatasetSchemaConnection_edges(ctx, field) + case "nodes": + return ec.fieldContext_DatasetSchemaConnection_nodes(ctx, field) + case "pageInfo": + return ec.fieldContext_DatasetSchemaConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchemaConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchemaConnection", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_datasetSchemas_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_datasets(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_datasets(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret 
= graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Datasets(rctx, fc.Args["datasetSchemaId"].(gqlmodel.ID), fc.Args["first"].(*int), fc.Args["last"].(*int), fc.Args["after"].(*usecase.Cursor), fc.Args["before"].(*usecase.Cursor)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetConnection) + fc.Result = res + return ec.marshalNDatasetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_datasets(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "edges": + return ec.fieldContext_DatasetConnection_edges(ctx, field) + case "nodes": + return ec.fieldContext_DatasetConnection_nodes(ctx, field) + case "pageInfo": + return ec.fieldContext_DatasetConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetConnection", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_datasets_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_dynamicDatasetSchemas(ctx 
context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_dynamicDatasetSchemas(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().DynamicDatasetSchemas(rctx, fc.Args["sceneId"].(gqlmodel.ID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_dynamicDatasetSchemas(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return 
ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_dynamicDatasetSchemas_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_searchUser(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_searchUser(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().SearchUser(rctx, fc.Args["nameOrEmail"].(string)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.User) + fc.Result = res + return ec.marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_searchUser(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_User_id(ctx, field) + case "name": + return ec.fieldContext_User_name(ctx, field) + case "email": + return ec.fieldContext_User_email(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type User", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_searchUser_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_checkProjectAlias(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_checkProjectAlias(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().CheckProjectAlias(rctx, fc.Args["alias"].(string)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.ProjectAliasAvailability) + fc.Result = res + return ec.marshalNProjectAliasAvailability2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectAliasAvailability(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_checkProjectAlias(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: true, + 
Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "alias": + return ec.fieldContext_ProjectAliasAvailability_alias(ctx, field) + case "available": + return ec.fieldContext_ProjectAliasAvailability_available(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ProjectAliasAvailability", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_checkProjectAlias_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query___type(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query___type(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.introspectType(fc.Args["name"].(string)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalO__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query___type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": 
+ return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query___type_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query___schema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query___schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.introspectSchema() + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*introspection.Schema) + fc.Result = res + return ec.marshalO__Schema2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšSchema(ctx, field.Selections, res) +} 
+ +func (ec *executionContext) fieldContext_Query___schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "description": + return ec.fieldContext___Schema_description(ctx, field) + case "types": + return ec.fieldContext___Schema_types(ctx, field) + case "queryType": + return ec.fieldContext___Schema_queryType(ctx, field) + case "mutationType": + return ec.fieldContext___Schema_mutationType(ctx, field) + case "subscriptionType": + return ec.fieldContext___Schema_subscriptionType(ctx, field) + case "directives": + return ec.fieldContext___Schema_directives(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Schema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Rect_west(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Rect) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Rect_west(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.West, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Rect_west(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = 
&graphql.FieldContext{ + Object: "Rect", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Rect_south(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Rect) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Rect_south(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.South, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Rect_south(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Rect", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Rect_east(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Rect) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Rect_east(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.East, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Rect_east(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Rect", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Rect_north(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Rect) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Rect_north(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.North, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Rect_north(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = 
&graphql.FieldContext{ + Object: "Rect", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _RemoveAssetPayload_assetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveAssetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveAssetPayload_assetId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AssetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RemoveAssetPayload_assetId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveAssetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _RemoveClusterPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveClusterPayload) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_RemoveClusterPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RemoveClusterPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveClusterPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + 
return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _RemoveClusterPayload_clusterId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveClusterPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveClusterPayload_clusterId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ClusterID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RemoveClusterPayload_clusterId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveClusterPayload", + Field: 
field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _RemoveDatasetSchemaPayload_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveDatasetSchemaPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveDatasetSchemaPayload_schemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RemoveDatasetSchemaPayload_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveDatasetSchemaPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _RemoveInfoboxFieldPayload_infoboxFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveInfoboxFieldPayload) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_RemoveInfoboxFieldPayload_infoboxFieldId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.InfoboxFieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RemoveInfoboxFieldPayload_infoboxFieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveInfoboxFieldPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _RemoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveInfoboxFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveInfoboxFieldPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != 
nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RemoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveInfoboxFieldPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _RemoveInfoboxPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveInfoboxPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveInfoboxPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RemoveInfoboxPayload_layer(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveInfoboxPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _RemoveLayerPayload_layerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveLayerPayload_layerId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LayerID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RemoveLayerPayload_layerId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _RemoveLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.RemoveLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveLayerPayload_parentLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentLayer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.LayerGroup) + fc.Result = res + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RemoveLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, 
field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _RemoveMemberFromTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveMemberFromTeamPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveMemberFromTeamPayload_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Team, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Team) + fc.Result = res + return 
ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RemoveMemberFromTeamPayload_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveMemberFromTeamPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _RemoveTagPayload_tagId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveTagPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveTagPayload_tagId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TagID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return 
ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RemoveTagPayload_tagId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveTagPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _RemoveTagPayload_updatedLayers(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveTagPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveTagPayload_updatedLayers(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.UpdatedLayers, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RemoveTagPayload_updatedLayers(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveTagPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _RemoveWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveWidgetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveWidgetPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RemoveWidgetPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveWidgetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case 
"rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _RemoveWidgetPayload_widgetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveWidgetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveWidgetPayload_widgetId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.WidgetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return 
ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RemoveWidgetPayload_widgetId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveWidgetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + 
return fc, nil +} + +func (ec *executionContext) _Scene_projectId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_projectId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ProjectID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_projectId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_teamId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in 
children + return obj.TeamID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_teamId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_propertyId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_propertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { 
+ fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_createdAt(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.CreatedAt, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(time.Time) + fc.Result = res + return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_createdAt(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type DateTime does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_updatedAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_updatedAt(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r 
!= nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.UpdatedAt, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(time.Time) + fc.Result = res + return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_updatedAt(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type DateTime does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_rootLayerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_rootLayerId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.RootLayerID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, 
field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_rootLayerId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_widgets(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_widgets(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Widgets, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.SceneWidget) + fc.Result = res + return ec.marshalNSceneWidget2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidgetแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_widgets(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_SceneWidget_id(ctx, field) + case "pluginId": + return ec.fieldContext_SceneWidget_pluginId(ctx, 
field) + case "extensionId": + return ec.fieldContext_SceneWidget_extensionId(ctx, field) + case "propertyId": + return ec.fieldContext_SceneWidget_propertyId(ctx, field) + case "enabled": + return ec.fieldContext_SceneWidget_enabled(ctx, field) + case "extended": + return ec.fieldContext_SceneWidget_extended(ctx, field) + case "plugin": + return ec.fieldContext_SceneWidget_plugin(ctx, field) + case "extension": + return ec.fieldContext_SceneWidget_extension(ctx, field) + case "property": + return ec.fieldContext_SceneWidget_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type SceneWidget", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_plugins(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_plugins(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Plugins, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.ScenePlugin) + fc.Result = res + return ec.marshalNScenePlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePluginแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_plugins(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_widgetAlignSystem(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.WidgetAlignSystem, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetAlignSystem) + fc.Result = res + return ec.marshalOWidgetAlignSystem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAlignSystem(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_widgetAlignSystem(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "inner": + return ec.fieldContext_WidgetAlignSystem_inner(ctx, field) + case "outer": + return 
ec.fieldContext_WidgetAlignSystem_outer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetAlignSystem", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_dynamicDatasetSchemas(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DynamicDatasetSchemas, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_dynamicDatasetSchemas(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case 
"totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_project(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_project(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Scene().Project(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Project) + fc.Result = res + return ec.marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_project(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Project_id(ctx, field) 
+ case "isArchived": + return ec.fieldContext_Project_isArchived(ctx, field) + case "isBasicAuthActive": + return ec.fieldContext_Project_isBasicAuthActive(ctx, field) + case "basicAuthUsername": + return ec.fieldContext_Project_basicAuthUsername(ctx, field) + case "basicAuthPassword": + return ec.fieldContext_Project_basicAuthPassword(ctx, field) + case "createdAt": + return ec.fieldContext_Project_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Project_updatedAt(ctx, field) + case "publishedAt": + return ec.fieldContext_Project_publishedAt(ctx, field) + case "name": + return ec.fieldContext_Project_name(ctx, field) + case "description": + return ec.fieldContext_Project_description(ctx, field) + case "alias": + return ec.fieldContext_Project_alias(ctx, field) + case "publicTitle": + return ec.fieldContext_Project_publicTitle(ctx, field) + case "publicDescription": + return ec.fieldContext_Project_publicDescription(ctx, field) + case "publicImage": + return ec.fieldContext_Project_publicImage(ctx, field) + case "publicNoIndex": + return ec.fieldContext_Project_publicNoIndex(ctx, field) + case "imageUrl": + return ec.fieldContext_Project_imageUrl(ctx, field) + case "teamId": + return ec.fieldContext_Project_teamId(ctx, field) + case "visualizer": + return ec.fieldContext_Project_visualizer(ctx, field) + case "publishmentStatus": + return ec.fieldContext_Project_publishmentStatus(ctx, field) + case "team": + return ec.fieldContext_Project_team(ctx, field) + case "scene": + return ec.fieldContext_Project_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Project", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := 
recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Scene().Team(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Team) + fc.Result = res + return ec.marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // 
use context from middleware stack in children + return ec.resolvers.Scene().Property(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_rootLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_rootLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Scene().RootLayer(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return 
graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.LayerGroup) + fc.Result = res + return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_rootLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return 
ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_datasetSchemas(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_datasetSchemas(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Scene().DatasetSchemas(rctx, obj, fc.Args["first"].(*int), fc.Args["last"].(*int), fc.Args["after"].(*usecase.Cursor), fc.Args["before"].(*usecase.Cursor)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchemaConnection) + fc.Result = res + return ec.marshalNDatasetSchemaConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_datasetSchemas(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case 
"edges": + return ec.fieldContext_DatasetSchemaConnection_edges(ctx, field) + case "nodes": + return ec.fieldContext_DatasetSchemaConnection_nodes(ctx, field) + case "pageInfo": + return ec.fieldContext_DatasetSchemaConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchemaConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchemaConnection", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Scene_datasetSchemas_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Scene_tagIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_tagIds(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TagIds, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.ID) + fc.Result = res + return ec.marshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_tagIds(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: false, + IsResolver: false, + 
Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_tags(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Scene().Tags(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.Tag) + fc.Result = res + return ec.marshalNTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_tags(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _Scene_clusters(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_clusters(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, 
ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Clusters, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.Cluster) + fc.Result = res + return ec.marshalNCluster2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšClusterแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Scene_clusters(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Cluster_id(ctx, field) + case "name": + return ec.fieldContext_Cluster_name(ctx, field) + case "propertyId": + return ec.fieldContext_Cluster_propertyId(ctx, field) + case "property": + return ec.fieldContext_Cluster_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Cluster", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _ScenePlugin_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ScenePlugin_pluginId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + 
return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ScenePlugin_pluginId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ScenePlugin", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _ScenePlugin_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ScenePlugin_propertyId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ScenePlugin_propertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: 
"ScenePlugin", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _ScenePlugin_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ScenePlugin_plugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.ScenePlugin().Plugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Plugin) + fc.Result = res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ScenePlugin_plugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ScenePlugin", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return 
ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _ScenePlugin_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ScenePlugin_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.ScenePlugin().Property(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) +} + +func (ec 
*executionContext) fieldContext_ScenePlugin_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ScenePlugin", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _SceneWidget_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_SceneWidget_id(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _SceneWidget_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_pluginId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_SceneWidget_pluginId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _SceneWidget_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_SceneWidget_extensionId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ExtensionID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_SceneWidget_extensionId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _SceneWidget_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_propertyId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if 
!graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_SceneWidget_propertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _SceneWidget_enabled(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_enabled(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Enabled, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_SceneWidget_enabled(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _SceneWidget_extended(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_extended(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Extended, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_SceneWidget_extended(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _SceneWidget_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_plugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.SceneWidget().Plugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Plugin) + fc.Result = res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_SceneWidget_plugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return 
ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _SceneWidget_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_extension(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.SceneWidget().Extension(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PluginExtension) + fc.Result = res + return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_SceneWidget_extension(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "extensionId": + return ec.fieldContext_PluginExtension_extensionId(ctx, field) + case "pluginId": + return ec.fieldContext_PluginExtension_pluginId(ctx, field) + case "type": + return ec.fieldContext_PluginExtension_type(ctx, field) + case "name": + return ec.fieldContext_PluginExtension_name(ctx, field) + case "description": + return 
ec.fieldContext_PluginExtension_description(ctx, field) + case "icon": + return ec.fieldContext_PluginExtension_icon(ctx, field) + case "singleOnly": + return ec.fieldContext_PluginExtension_singleOnly(ctx, field) + case "widgetLayout": + return ec.fieldContext_PluginExtension_widgetLayout(ctx, field) + case "visualizer": + return ec.fieldContext_PluginExtension_visualizer(ctx, field) + case "propertySchemaId": + return ec.fieldContext_PluginExtension_propertySchemaId(ctx, field) + case "allTranslatedName": + return ec.fieldContext_PluginExtension_allTranslatedName(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PluginExtension_allTranslatedDescription(ctx, field) + case "plugin": + return ec.fieldContext_PluginExtension_plugin(ctx, field) + case "sceneWidget": + return ec.fieldContext_PluginExtension_sceneWidget(ctx, field) + case "propertySchema": + return ec.fieldContext_PluginExtension_propertySchema(ctx, field) + case "translatedName": + return ec.fieldContext_PluginExtension_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_PluginExtension_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PluginExtension", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _SceneWidget_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.SceneWidget().Property(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == 
nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_SceneWidget_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _SignupPayload_user(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SignupPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SignupPayload_user(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.User, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.User) + fc.Result 
= res + return ec.marshalNUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_SignupPayload_user(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SignupPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_User_id(ctx, field) + case "name": + return ec.fieldContext_User_name(ctx, field) + case "email": + return ec.fieldContext_User_email(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type User", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _SignupPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SignupPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SignupPayload_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Team, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Team) + fc.Result = res + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_SignupPayload_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = 
&graphql.FieldContext{ + Object: "SignupPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _SyncDatasetPayload_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SyncDatasetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SyncDatasetPayload_sceneId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_SyncDatasetPayload_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SyncDatasetPayload", + Field: field, + IsMethod: 
false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _SyncDatasetPayload_url(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SyncDatasetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SyncDatasetPayload_url(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.URL, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_SyncDatasetPayload_url(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SyncDatasetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _SyncDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SyncDatasetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SyncDatasetPayload_datasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != 
nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DatasetSchema, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_SyncDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SyncDatasetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, 
fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _SyncDatasetPayload_dataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SyncDatasetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SyncDatasetPayload_dataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Dataset, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.Dataset) + fc.Result = res + return ec.marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_SyncDatasetPayload_dataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SyncDatasetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was 
found under type Dataset", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _TagGroup_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagGroup_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagGroup_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TagGroup_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagGroup_sceneId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = 
rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagGroup_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TagGroup_label(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagGroup_label(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Label, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagGroup_label(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + 
Object: "TagGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TagGroup_tagIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagGroup_tagIds(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TagIds, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagGroup_tagIds(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TagGroup_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagGroup_tags(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, 
ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TagGroup().Tags(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.TagItem) + fc.Result = res + return ec.marshalNTagItem2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagItemแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagGroup_tags(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_TagItem_id(ctx, field) + case "sceneId": + return ec.fieldContext_TagItem_sceneId(ctx, field) + case "label": + return ec.fieldContext_TagItem_label(ctx, field) + case "parentId": + return ec.fieldContext_TagItem_parentId(ctx, field) + case "linkedDatasetID": + return ec.fieldContext_TagItem_linkedDatasetID(ctx, field) + case "linkedDatasetSchemaID": + return ec.fieldContext_TagItem_linkedDatasetSchemaID(ctx, field) + case "linkedDatasetFieldID": + return ec.fieldContext_TagItem_linkedDatasetFieldID(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_TagItem_linkedDatasetSchema(ctx, field) + case "linkedDataset": + return ec.fieldContext_TagItem_linkedDataset(ctx, field) + case "linkedDatasetField": + return ec.fieldContext_TagItem_linkedDatasetField(ctx, field) + case "parent": + return ec.fieldContext_TagItem_parent(ctx, field) + case "layers": + return ec.fieldContext_TagItem_layers(ctx, 
field) + } + return nil, fmt.Errorf("no field named %q was found under type TagItem", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _TagGroup_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagGroup_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TagGroup().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagGroup_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) 
+ case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _TagGroup_layers(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagGroup_layers(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TagGroup().Layers(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) 
fieldContext_TagGroup_layers(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _TagItem_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagItem_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TagItem_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, 
err := ec.fieldContext_TagItem_sceneId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagItem_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TagItem_label(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_label(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Label, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + 
ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagItem_label(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TagItem_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_parentId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagItem_parentId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func 
(ec *executionContext) _TagItem_linkedDatasetID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_linkedDatasetID(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LinkedDatasetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagItem_linkedDatasetID(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TagItem_linkedDatasetSchemaID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_linkedDatasetSchemaID(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
obj.LinkedDatasetSchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagItem_linkedDatasetSchemaID(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TagItem_linkedDatasetFieldID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_linkedDatasetFieldID(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LinkedDatasetFieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagItem_linkedDatasetFieldID(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: false, + 
IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TagItem_linkedDatasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_linkedDatasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TagItem().LinkedDatasetSchema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagItem_linkedDatasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return 
ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _TagItem_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_linkedDataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TagItem().LinkedDataset(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagItem_linkedDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return 
ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _TagItem_linkedDatasetField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_linkedDatasetField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TagItem().LinkedDatasetField(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetField) + fc.Result = res + return ec.marshalODatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetField(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagItem_linkedDatasetField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_DatasetField_fieldId(ctx, field) + case "schemaId": + return 
ec.fieldContext_DatasetField_schemaId(ctx, field) + case "source": + return ec.fieldContext_DatasetField_source(ctx, field) + case "type": + return ec.fieldContext_DatasetField_type(ctx, field) + case "value": + return ec.fieldContext_DatasetField_value(ctx, field) + case "schema": + return ec.fieldContext_DatasetField_schema(ctx, field) + case "field": + return ec.fieldContext_DatasetField_field(ctx, field) + case "valueRef": + return ec.fieldContext_DatasetField_valueRef(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetField", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _TagItem_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_parent(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TagItem().Parent(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.TagGroup) + fc.Result = res + return ec.marshalOTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagItem_parent(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_TagGroup_id(ctx, field) + case 
"sceneId": + return ec.fieldContext_TagGroup_sceneId(ctx, field) + case "label": + return ec.fieldContext_TagGroup_label(ctx, field) + case "tagIds": + return ec.fieldContext_TagGroup_tagIds(ctx, field) + case "tags": + return ec.fieldContext_TagGroup_tags(ctx, field) + case "scene": + return ec.fieldContext_TagGroup_scene(ctx, field) + case "layers": + return ec.fieldContext_TagGroup_layers(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TagGroup", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _TagItem_layers(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_layers(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TagItem().Layers(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TagItem_layers(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, 
nil +} + +func (ec *executionContext) _Team_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Team_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Team_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Team", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Team_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Team_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + 
ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Team_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Team", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Team_members(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Team_members(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Members, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.TeamMember) + fc.Result = res + return ec.marshalNTeamMember2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamMemberแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Team_members(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Team", + Field: field, + IsMethod: false, + IsResolver: false, + Child: 
func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "userId": + return ec.fieldContext_TeamMember_userId(ctx, field) + case "role": + return ec.fieldContext_TeamMember_role(ctx, field) + case "user": + return ec.fieldContext_TeamMember_user(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TeamMember", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Team_personal(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Team_personal(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Personal, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Team_personal(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Team", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Team_assets(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Team_assets(ctx, field) + if err != nil { + 
return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Team().Assets(rctx, obj, fc.Args["first"].(*int), fc.Args["last"].(*int), fc.Args["after"].(*usecase.Cursor), fc.Args["before"].(*usecase.Cursor)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.AssetConnection) + fc.Result = res + return ec.marshalNAssetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Team_assets(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Team", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "edges": + return ec.fieldContext_AssetConnection_edges(ctx, field) + case "nodes": + return ec.fieldContext_AssetConnection_nodes(ctx, field) + case "pageInfo": + return ec.fieldContext_AssetConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_AssetConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AssetConnection", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Team_assets_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + 
return + } + return fc, nil +} + +func (ec *executionContext) _Team_projects(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Team_projects(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Team().Projects(rctx, obj, fc.Args["includeArchived"].(*bool), fc.Args["first"].(*int), fc.Args["last"].(*int), fc.Args["after"].(*usecase.Cursor), fc.Args["before"].(*usecase.Cursor)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.ProjectConnection) + fc.Result = res + return ec.marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Team_projects(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Team", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "edges": + return ec.fieldContext_ProjectConnection_edges(ctx, field) + case "nodes": + return ec.fieldContext_ProjectConnection_nodes(ctx, field) + case "pageInfo": + return ec.fieldContext_ProjectConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_ProjectConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type 
ProjectConnection", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Team_projects_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _TeamMember_userId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TeamMember) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TeamMember_userId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.UserID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TeamMember_userId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TeamMember", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TeamMember_role(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TeamMember) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TeamMember_role(ctx, field) + if 
err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Role, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Role) + fc.Result = res + return ec.marshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRole(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TeamMember_role(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TeamMember", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Role does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TeamMember_user(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TeamMember) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TeamMember_user(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TeamMember().User(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.User) + 
fc.Result = res + return ec.marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TeamMember_user(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TeamMember", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_User_id(ctx, field) + case "name": + return ec.fieldContext_User_name(ctx, field) + case "email": + return ec.fieldContext_User_email(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type User", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _Typography_fontFamily(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Typography_fontFamily(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FontFamily, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Typography_fontFamily(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Typography", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Typography_fontWeight(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Typography_fontWeight(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FontWeight, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Typography_fontWeight(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Typography", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Typography_fontSize(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Typography_fontSize(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use 
context from middleware stack in children + return obj.FontSize, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*int) + fc.Result = res + return ec.marshalOInt2แš–int(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Typography_fontSize(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Typography", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Typography_color(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Typography_color(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Color, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Typography_color(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Typography", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have 
child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Typography_textAlign(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Typography_textAlign(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TextAlign, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.TextAlign) + fc.Result = res + return ec.marshalOTextAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTextAlign(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Typography_textAlign(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Typography", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TextAlign does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Typography_bold(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Typography_bold(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware 
stack in children + return obj.Bold, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*bool) + fc.Result = res + return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Typography_bold(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Typography", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Typography_italic(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Typography_italic(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Italic, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*bool) + fc.Result = res + return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Typography_italic(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Typography", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } 
+ return fc, nil +} + +func (ec *executionContext) _Typography_underline(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Typography_underline(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Underline, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*bool) + fc.Result = res + return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Typography_underline(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Typography", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _UninstallPluginPayload_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UninstallPluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UninstallPluginPayload_pluginId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, 
err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UninstallPluginPayload_pluginId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UninstallPluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _UninstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UninstallPluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UninstallPluginPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UninstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UninstallPluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _UpdateClusterPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateClusterPayload) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_UpdateClusterPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UpdateClusterPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateClusterPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + 
return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _UpdateClusterPayload_cluster(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateClusterPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateClusterPayload_cluster(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Cluster, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Cluster) + fc.Result = res + return ec.marshalNCluster2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCluster(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UpdateClusterPayload_cluster(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: 
"UpdateClusterPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Cluster_id(ctx, field) + case "name": + return ec.fieldContext_Cluster_name(ctx, field) + case "propertyId": + return ec.fieldContext_Cluster_propertyId(ctx, field) + case "property": + return ec.fieldContext_Cluster_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Cluster", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _UpdateDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateDatasetSchemaPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateDatasetSchemaPayload_datasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DatasetSchema, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UpdateDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateDatasetSchemaPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) 
{ + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _UpdateLayerPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateLayerPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Layer) + fc.Result = res + return 
ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UpdateLayerPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _UpdateMePayload_me(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateMePayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateMePayload_me(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Me, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Me) + fc.Result = res + return ec.marshalNMe2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMe(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UpdateMePayload_me(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateMePayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { 
+ case "id": + return ec.fieldContext_Me_id(ctx, field) + case "name": + return ec.fieldContext_Me_name(ctx, field) + case "email": + return ec.fieldContext_Me_email(ctx, field) + case "lang": + return ec.fieldContext_Me_lang(ctx, field) + case "theme": + return ec.fieldContext_Me_theme(ctx, field) + case "myTeamId": + return ec.fieldContext_Me_myTeamId(ctx, field) + case "auths": + return ec.fieldContext_Me_auths(ctx, field) + case "teams": + return ec.fieldContext_Me_teams(ctx, field) + case "myTeam": + return ec.fieldContext_Me_myTeam(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Me", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _UpdateMemberOfTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateMemberOfTeamPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateMemberOfTeamPayload_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Team, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Team) + fc.Result = res + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UpdateMemberOfTeamPayload_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateMemberOfTeamPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: 
func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _UpdateTagPayload_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateTagPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateTagPayload_tag(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Tag, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Tag) + fc.Result = res + return ec.marshalNTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UpdateTagPayload_tag(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateTagPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, 
errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + +func (ec *executionContext) _UpdateTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateTeamPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateTeamPayload_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Team, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Team) + fc.Result = res + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UpdateTeamPayload_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateTeamPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return 
fc, nil +} + +func (ec *executionContext) _UpdateWidgetAlignSystemPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateWidgetAlignSystemPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateWidgetAlignSystemPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UpdateWidgetAlignSystemPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateWidgetAlignSystemPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + 
return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _UpdateWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateWidgetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateWidgetPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} 
+ +func (ec *executionContext) fieldContext_UpdateWidgetPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateWidgetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _UpdateWidgetPayload_sceneWidget(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.UpdateWidgetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateWidgetPayload_sceneWidget(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneWidget, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.SceneWidget) + fc.Result = res + return ec.marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidget(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UpdateWidgetPayload_sceneWidget(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateWidgetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_SceneWidget_id(ctx, field) + case "pluginId": + return ec.fieldContext_SceneWidget_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_SceneWidget_extensionId(ctx, field) + case "propertyId": + return ec.fieldContext_SceneWidget_propertyId(ctx, field) + case "enabled": + return ec.fieldContext_SceneWidget_enabled(ctx, field) + case "extended": + return ec.fieldContext_SceneWidget_extended(ctx, field) + case "plugin": + return ec.fieldContext_SceneWidget_plugin(ctx, field) + case "extension": + return ec.fieldContext_SceneWidget_extension(ctx, field) + case "property": + 
return ec.fieldContext_SceneWidget_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type SceneWidget", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _UpgradePluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpgradePluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpgradePluginPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UpgradePluginPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpgradePluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return 
ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _UpgradePluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpgradePluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpgradePluginPayload_scenePlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ScenePlugin, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.ScenePlugin) 
+ fc.Result = res + return ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UpgradePluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpgradePluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _UploadPluginPayload_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UploadPluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UploadPluginPayload_plugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Plugin, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Plugin) + fc.Result = res + return 
ec.marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UploadPluginPayload_plugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UploadPluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _UploadPluginPayload_scene(ctx 
context.Context, field graphql.CollectedField, obj *gqlmodel.UploadPluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UploadPluginPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UploadPluginPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UploadPluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case 
"widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _UploadPluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UploadPluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UploadPluginPayload_scenePlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ScenePlugin, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.ScenePlugin) + fc.Result = res + return ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_UploadPluginPayload_scenePlugin(ctx 
context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UploadPluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _User_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_User_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_User_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "User", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _User_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_User_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_User_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "User", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _User_email(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_User_email(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use 
context from middleware stack in children + return obj.Email, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_User_email(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "User", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetAlignSystem_inner(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetAlignSystem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetAlignSystem_inner(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Inner, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetZone) + fc.Result = res + return ec.marshalOWidgetZone2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZone(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetAlignSystem_inner(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetAlignSystem", + Field: 
field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "left": + return ec.fieldContext_WidgetZone_left(ctx, field) + case "center": + return ec.fieldContext_WidgetZone_center(ctx, field) + case "right": + return ec.fieldContext_WidgetZone_right(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetZone", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetAlignSystem_outer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetAlignSystem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetAlignSystem_outer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Outer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetZone) + fc.Result = res + return ec.marshalOWidgetZone2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZone(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetAlignSystem_outer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetAlignSystem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "left": + return ec.fieldContext_WidgetZone_left(ctx, field) + case "center": + return ec.fieldContext_WidgetZone_center(ctx, field) + case "right": + 
return ec.fieldContext_WidgetZone_right(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetZone", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetArea_widgetIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetArea) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetArea_widgetIds(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.WidgetIds, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.ID) + fc.Result = res + return ec.marshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetArea_widgetIds(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetArea", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetArea_align(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetArea) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetArea_align(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, 
ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Align, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.WidgetAreaAlign) + fc.Result = res + return ec.marshalNWidgetAreaAlign2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaAlign(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetArea_align(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetArea", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type WidgetAreaAlign does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetExtendable_vertically(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetExtendable) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetExtendable_vertically(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Vertically, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + 
return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetExtendable_vertically(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetExtendable", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetExtendable_horizontally(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetExtendable) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetExtendable_horizontally(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Horizontally, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetExtendable_horizontally(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetExtendable", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) 
_WidgetLayout_extendable(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLayout) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetLayout_extendable(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Extendable, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetExtendable) + fc.Result = res + return ec.marshalNWidgetExtendable2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetExtendable(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetLayout_extendable(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetLayout", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "vertically": + return ec.fieldContext_WidgetExtendable_vertically(ctx, field) + case "horizontally": + return ec.fieldContext_WidgetExtendable_horizontally(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetExtendable", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetLayout_extended(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLayout) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetLayout_extended(ctx, field) + if err != nil { + return graphql.Null + } + ctx = 
graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Extended, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetLayout_extended(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetLayout", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetLayout_floating(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLayout) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetLayout_floating(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Floating, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, 
field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetLayout_floating(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetLayout", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetLayout_defaultLocation(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLayout) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetLayout_defaultLocation(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DefaultLocation, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetLocation) + fc.Result = res + return ec.marshalOWidgetLocation2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLocation(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetLayout_defaultLocation(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetLayout", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "zone": + return ec.fieldContext_WidgetLocation_zone(ctx, field) + case "section": + return 
ec.fieldContext_WidgetLocation_section(ctx, field) + case "area": + return ec.fieldContext_WidgetLocation_area(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetLocation", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetLocation_zone(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLocation) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetLocation_zone(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Zone, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.WidgetZoneType) + fc.Result = res + return ec.marshalNWidgetZoneType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZoneType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetLocation_zone(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetLocation", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type WidgetZoneType does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetLocation_section(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLocation) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetLocation_section(ctx, field) + if err != nil { + 
return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Section, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.WidgetSectionType) + fc.Result = res + return ec.marshalNWidgetSectionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSectionType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetLocation_section(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetLocation", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type WidgetSectionType does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetLocation_area(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLocation) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetLocation_area(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Area, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if 
!graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.WidgetAreaType) + fc.Result = res + return ec.marshalNWidgetAreaType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetLocation_area(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetLocation", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type WidgetAreaType does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetSection_top(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetSection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetSection_top(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Top, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetArea) + fc.Result = res + return ec.marshalOWidgetArea2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetArea(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetSection_top(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetSection", + Field: field, + IsMethod: false, + IsResolver: false, 
+ Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "widgetIds": + return ec.fieldContext_WidgetArea_widgetIds(ctx, field) + case "align": + return ec.fieldContext_WidgetArea_align(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetArea", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetSection_middle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetSection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetSection_middle(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Middle, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetArea) + fc.Result = res + return ec.marshalOWidgetArea2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetArea(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetSection_middle(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetSection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "widgetIds": + return ec.fieldContext_WidgetArea_widgetIds(ctx, field) + case "align": + return ec.fieldContext_WidgetArea_align(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetArea", field.Name) + }, + } + return fc, nil +} + +func (ec 
*executionContext) _WidgetSection_bottom(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetSection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetSection_bottom(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Bottom, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetArea) + fc.Result = res + return ec.marshalOWidgetArea2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetArea(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetSection_bottom(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetSection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "widgetIds": + return ec.fieldContext_WidgetArea_widgetIds(ctx, field) + case "align": + return ec.fieldContext_WidgetArea_align(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetArea", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetZone_left(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetZone) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetZone_left(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + 
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Left, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetSection) + fc.Result = res + return ec.marshalOWidgetSection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSection(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetZone_left(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetZone", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "top": + return ec.fieldContext_WidgetSection_top(ctx, field) + case "middle": + return ec.fieldContext_WidgetSection_middle(ctx, field) + case "bottom": + return ec.fieldContext_WidgetSection_bottom(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetSection", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetZone_center(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetZone) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetZone_center(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Center, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetSection) 
+ fc.Result = res + return ec.marshalOWidgetSection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSection(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetZone_center(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetZone", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "top": + return ec.fieldContext_WidgetSection_top(ctx, field) + case "middle": + return ec.fieldContext_WidgetSection_middle(ctx, field) + case "bottom": + return ec.fieldContext_WidgetSection_bottom(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetSection", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _WidgetZone_right(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetZone) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetZone_right(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Right, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetSection) + fc.Result = res + return ec.marshalOWidgetSection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSection(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetZone_right(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + 
fc = &graphql.FieldContext{ + Object: "WidgetZone", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "top": + return ec.fieldContext_WidgetSection_top(ctx, field) + case "middle": + return ec.fieldContext_WidgetSection_middle(ctx, field) + case "bottom": + return ec.fieldContext_WidgetSection_bottom(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetSection", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Directive_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Directive_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Directive_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Directive", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Directive_description(ctx 
context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Directive_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Directive_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Directive", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Directive_locations(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Directive_locations(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Locations, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { 
+ ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]string) + fc.Result = res + return ec.marshalN__DirectiveLocation2แš•stringแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Directive_locations(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Directive", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type __DirectiveLocation does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Directive_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Directive_args(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Args, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]introspection.InputValue) + fc.Result = res + return ec.marshalN__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Directive_args(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Directive", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___InputValue_name(ctx, field) + case "description": + return ec.fieldContext___InputValue_description(ctx, field) + case "type": + return ec.fieldContext___InputValue_type(ctx, field) + case "defaultValue": + return ec.fieldContext___InputValue_defaultValue(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __InputValue", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Directive_isRepeatable(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Directive_isRepeatable(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsRepeatable, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Directive_isRepeatable(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Directive", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___EnumValue_name(ctx context.Context, field 
graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___EnumValue_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___EnumValue_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___EnumValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___EnumValue_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { 
+ return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___EnumValue_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___EnumValue_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___EnumValue_isDeprecated(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsDeprecated(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___EnumValue_isDeprecated(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, 
nil +} + +func (ec *executionContext) ___EnumValue_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___EnumValue_deprecationReason(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DeprecationReason(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___EnumValue_deprecationReason(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Field_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Field_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null 
+ } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Field_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Field", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Field_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Field_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Field_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Field", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + 
+func (ec *executionContext) ___Field_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Field_args(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Args, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]introspection.InputValue) + fc.Result = res + return ec.marshalN__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Field_args(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Field", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___InputValue_name(ctx, field) + case "description": + return ec.fieldContext___InputValue_description(ctx, field) + case "type": + return ec.fieldContext___InputValue_type(ctx, field) + case "defaultValue": + return ec.fieldContext___InputValue_defaultValue(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __InputValue", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Field_type(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Field_type(ctx, field) + 
if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalN__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Field_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Field", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func 
(ec *executionContext) ___Field_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Field_isDeprecated(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsDeprecated(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Field_isDeprecated(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Field", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Field_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Field_deprecationReason(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DeprecationReason(), nil + }) + 
if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Field_deprecationReason(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Field", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___InputValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___InputValue_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___InputValue_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type 
String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___InputValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___InputValue_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___InputValue_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___InputValue_type(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___InputValue_type(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if 
err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalN__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___InputValue_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___InputValue_defaultValue(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___InputValue_defaultValue(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, 
ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DefaultValue, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___InputValue_defaultValue(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Schema_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Schema_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Schema", + Field: 
field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Schema_types(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_types(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Types(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]introspection.Type) + fc.Result = res + return ec.marshalN__Type2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšTypeแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Schema_types(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Schema", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return 
ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Schema_queryType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_queryType(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.QueryType(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalN__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Schema_queryType(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Schema", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return 
ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Schema_mutationType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_mutationType(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.MutationType(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalO__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Schema_mutationType(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Schema", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + 
case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Schema_subscriptionType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_subscriptionType(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SubscriptionType(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalO__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Schema_subscriptionType(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Schema", + 
Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Schema_directives(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_directives(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Directives(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]introspection.Directive) + fc.Result = res + return 
ec.marshalN__Directive2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšDirectiveแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Schema_directives(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Schema", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___Directive_name(ctx, field) + case "description": + return ec.fieldContext___Directive_description(ctx, field) + case "locations": + return ec.fieldContext___Directive_locations(ctx, field) + case "args": + return ec.fieldContext___Directive_args(ctx, field) + case "isRepeatable": + return ec.fieldContext___Directive_isRepeatable(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Directive", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_kind(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_kind(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Kind(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalN__TypeKind2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_kind(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type __TypeKind does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + 
ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_fields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_fields(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fields(fc.Args["includeDeprecated"].(bool)), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.Field) + fc.Result = res + return ec.marshalO__Field2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšFieldแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_fields(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___Field_name(ctx, field) + case "description": + return ec.fieldContext___Field_description(ctx, field) + case "args": + return ec.fieldContext___Field_args(ctx, field) + case "type": + return ec.fieldContext___Field_type(ctx, field) + case "isDeprecated": + return ec.fieldContext___Field_isDeprecated(ctx, field) + case "deprecationReason": + return ec.fieldContext___Field_deprecationReason(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Field", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field___Type_fields_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) ___Type_interfaces(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_interfaces(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Interfaces(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.Type) + fc.Result = res + return ec.marshalO__Type2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšTypeแš„(ctx, 
field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_interfaces(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_possibleTypes(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_possibleTypes(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PossibleTypes(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := 
resTmp.([]introspection.Type) + fc.Result = res + return ec.marshalO__Type2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšTypeแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_possibleTypes(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_enumValues(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_enumValues(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
obj.EnumValues(fc.Args["includeDeprecated"].(bool)), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.EnumValue) + fc.Result = res + return ec.marshalO__EnumValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšEnumValueแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_enumValues(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___EnumValue_name(ctx, field) + case "description": + return ec.fieldContext___EnumValue_description(ctx, field) + case "isDeprecated": + return ec.fieldContext___EnumValue_isDeprecated(ctx, field) + case "deprecationReason": + return ec.fieldContext___EnumValue_deprecationReason(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __EnumValue", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field___Type_enumValues_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) ___Type_inputFields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_inputFields(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, 
error) { + ctx = rctx // use context from middleware stack in children + return obj.InputFields(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.InputValue) + fc.Result = res + return ec.marshalO__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_inputFields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___InputValue_name(ctx, field) + case "description": + return ec.fieldContext___InputValue_description(ctx, field) + case "type": + return ec.fieldContext___InputValue_type(ctx, field) + case "defaultValue": + return ec.fieldContext___InputValue_defaultValue(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __InputValue", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_ofType(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.OfType(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = 
res + return ec.marshalO__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_ofType(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_specifiedByURL(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_specifiedByURL(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SpecifiedByURL(), nil + }) + if err != nil { + 
ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_specifiedByURL(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +// endregion **************************** field.gotpl ***************************** + +// region **************************** input.gotpl ***************************** + +func (ec *executionContext) unmarshalInputAddClusterInput(ctx context.Context, obj interface{}) (gqlmodel.AddClusterInput, error) { + var it gqlmodel.AddClusterInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddDatasetSchemaInput(ctx context.Context, obj interface{}) (gqlmodel.AddDatasetSchemaInput, error) { + var it gqlmodel.AddDatasetSchemaInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { 
+ case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + case "representativefield": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("representativefield")) + it.Representativefield, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddDynamicDatasetInput(ctx context.Context, obj interface{}) (gqlmodel.AddDynamicDatasetInput, error) { + var it gqlmodel.AddDynamicDatasetInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "datasetSchemaId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaId")) + it.DatasetSchemaID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "author": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("author")) + it.Author, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + case "content": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("content")) + it.Content, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + case "lat": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lat")) + it.Lat, err = 
ec.unmarshalOFloat2แš–float64(ctx, v) + if err != nil { + return it, err + } + case "lng": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lng")) + it.Lng, err = ec.unmarshalOFloat2แš–float64(ctx, v) + if err != nil { + return it, err + } + case "target": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("target")) + it.Target, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddDynamicDatasetSchemaInput(ctx context.Context, obj interface{}) (gqlmodel.AddDynamicDatasetSchemaInput, error) { + var it gqlmodel.AddDynamicDatasetSchemaInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddInfoboxFieldInput(ctx context.Context, obj interface{}) (gqlmodel.AddInfoboxFieldInput, error) { + var it gqlmodel.AddInfoboxFieldInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "pluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) + it.PluginID, err = 
ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "extensionId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extensionId")) + it.ExtensionID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddLayerGroupInput(ctx context.Context, obj interface{}) (gqlmodel.AddLayerGroupInput, error) { + var it gqlmodel.AddLayerGroupInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "parentLayerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("parentLayerId")) + it.ParentLayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "pluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) + it.PluginID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "extensionId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extensionId")) + it.ExtensionID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = 
ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + case "linkedDatasetSchemaID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("linkedDatasetSchemaID")) + it.LinkedDatasetSchemaID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "representativeFieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("representativeFieldId")) + it.RepresentativeFieldID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddLayerItemInput(ctx context.Context, obj interface{}) (gqlmodel.AddLayerItemInput, error) { + var it gqlmodel.AddLayerItemInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "parentLayerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("parentLayerId")) + it.ParentLayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "pluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) + it.PluginID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "extensionId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extensionId")) + it.ExtensionID, err = 
ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "lat": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lat")) + it.Lat, err = ec.unmarshalOFloat2แš–float64(ctx, v) + if err != nil { + return it, err + } + case "lng": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lng")) + it.Lng, err = ec.unmarshalOFloat2แš–float64(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddMemberToTeamInput(ctx context.Context, obj interface{}) (gqlmodel.AddMemberToTeamInput, error) { + var it gqlmodel.AddMemberToTeamInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "teamId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "userId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) + it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "role": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("role")) + it.Role, err = 
ec.unmarshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRole(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddPropertyItemInput(ctx context.Context, obj interface{}) (gqlmodel.AddPropertyItemInput, error) { + var it gqlmodel.AddPropertyItemInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaGroupId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + case "nameFieldValue": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("nameFieldValue")) + it.NameFieldValue, err = ec.unmarshalOAny2interface(ctx, v) + if err != nil { + return it, err + } + case "nameFieldType": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("nameFieldType")) + it.NameFieldType, err = ec.unmarshalOValueType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddWidgetInput(ctx context.Context, obj interface{}) (gqlmodel.AddWidgetInput, 
error) { + var it gqlmodel.AddWidgetInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "pluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) + it.PluginID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "extensionId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extensionId")) + it.ExtensionID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAttachTagItemToGroupInput(ctx context.Context, obj interface{}) (gqlmodel.AttachTagItemToGroupInput, error) { + var it gqlmodel.AttachTagItemToGroupInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "itemID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemID")) + it.ItemID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "groupID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("groupID")) + it.GroupID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return 
it, nil +} + +func (ec *executionContext) unmarshalInputAttachTagToLayerInput(ctx context.Context, obj interface{}) (gqlmodel.AttachTagToLayerInput, error) { + var it gqlmodel.AttachTagToLayerInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "tagID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tagID")) + it.TagID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "layerID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerID")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputCreateAssetInput(ctx context.Context, obj interface{}) (gqlmodel.CreateAssetInput, error) { + var it gqlmodel.CreateAssetInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "teamId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "file": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("file")) + it.File, err = ec.unmarshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputCreateInfoboxInput(ctx context.Context, obj interface{}) (gqlmodel.CreateInfoboxInput, error) { + var it gqlmodel.CreateInfoboxInput + asMap := 
map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputCreateProjectInput(ctx context.Context, obj interface{}) (gqlmodel.CreateProjectInput, error) { + var it gqlmodel.CreateProjectInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "teamId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "visualizer": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("visualizer")) + it.Visualizer, err = ec.unmarshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "description": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("description")) + it.Description, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "imageUrl": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("imageUrl")) + it.ImageURL, err = ec.unmarshalOURL2แš–netแš‹urlแšURL(ctx, v) + if err != nil { + return it, err + } + case 
"alias": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("alias")) + it.Alias, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "archived": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("archived")) + it.Archived, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputCreateSceneInput(ctx context.Context, obj interface{}) (gqlmodel.CreateSceneInput, error) { + var it gqlmodel.CreateSceneInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "projectId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) + it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputCreateTagGroupInput(ctx context.Context, obj interface{}) (gqlmodel.CreateTagGroupInput, error) { + var it gqlmodel.CreateTagGroupInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "label": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("label")) + it.Label, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + case "tags": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tags")) + it.Tags, 
err = ec.unmarshalOID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputCreateTagItemInput(ctx context.Context, obj interface{}) (gqlmodel.CreateTagItemInput, error) { + var it gqlmodel.CreateTagItemInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "label": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("label")) + it.Label, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + case "parent": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("parent")) + it.Parent, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "linkedDatasetSchemaID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("linkedDatasetSchemaID")) + it.LinkedDatasetSchemaID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "linkedDatasetID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("linkedDatasetID")) + it.LinkedDatasetID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "linkedDatasetField": + var err error + + ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("linkedDatasetField")) + it.LinkedDatasetField, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputCreateTeamInput(ctx context.Context, obj interface{}) (gqlmodel.CreateTeamInput, error) { + var it gqlmodel.CreateTeamInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputDeleteMeInput(ctx context.Context, obj interface{}) (gqlmodel.DeleteMeInput, error) { + var it gqlmodel.DeleteMeInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "userId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) + it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputDeleteProjectInput(ctx context.Context, obj interface{}) (gqlmodel.DeleteProjectInput, error) { + var it gqlmodel.DeleteProjectInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "projectId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) + it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + 
if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputDeleteTeamInput(ctx context.Context, obj interface{}) (gqlmodel.DeleteTeamInput, error) { + var it gqlmodel.DeleteTeamInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "teamId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputDetachTagFromLayerInput(ctx context.Context, obj interface{}) (gqlmodel.DetachTagFromLayerInput, error) { + var it gqlmodel.DetachTagFromLayerInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "tagID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tagID")) + it.TagID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "layerID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerID")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputDetachTagItemFromGroupInput(ctx context.Context, obj interface{}) (gqlmodel.DetachTagItemFromGroupInput, error) { + var it gqlmodel.DetachTagItemFromGroupInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "itemID": + var err 
error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemID")) + it.ItemID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "groupID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("groupID")) + it.GroupID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputImportDatasetFromGoogleSheetInput(ctx context.Context, obj interface{}) (gqlmodel.ImportDatasetFromGoogleSheetInput, error) { + var it gqlmodel.ImportDatasetFromGoogleSheetInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "accessToken": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("accessToken")) + it.AccessToken, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + case "fileId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fileId")) + it.FileID, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + case "sheetName": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sheetName")) + it.SheetName, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "datasetSchemaId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaId")) + it.DatasetSchemaID, err = 
ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputImportDatasetInput(ctx context.Context, obj interface{}) (gqlmodel.ImportDatasetInput, error) { + var it gqlmodel.ImportDatasetInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "file": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("file")) + it.File, err = ec.unmarshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx, v) + if err != nil { + return it, err + } + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "datasetSchemaId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaId")) + it.DatasetSchemaID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputImportLayerInput(ctx context.Context, obj interface{}) (gqlmodel.ImportLayerInput, error) { + var it gqlmodel.ImportLayerInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "file": + var err error 
+ + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("file")) + it.File, err = ec.unmarshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx, v) + if err != nil { + return it, err + } + case "format": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("format")) + it.Format, err = ec.unmarshalNLayerEncodingFormat2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerEncodingFormat(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputInstallPluginInput(ctx context.Context, obj interface{}) (gqlmodel.InstallPluginInput, error) { + var it gqlmodel.InstallPluginInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "pluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) + it.PluginID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputLinkDatasetToPropertyValueInput(ctx context.Context, obj interface{}) (gqlmodel.LinkDatasetToPropertyValueInput, error) { + var it gqlmodel.LinkDatasetToPropertyValueInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = 
ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaGroupId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "fieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) + it.FieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "datasetSchemaIds": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaIds")) + it.DatasetSchemaIds, err = ec.unmarshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, v) + if err != nil { + return it, err + } + case "datasetSchemaFieldIds": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaFieldIds")) + it.DatasetSchemaFieldIds, err = ec.unmarshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, v) + if err != nil { + return it, err + } + case "datasetIds": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetIds")) + it.DatasetIds, err = ec.unmarshalOID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec 
*executionContext) unmarshalInputMoveInfoboxFieldInput(ctx context.Context, obj interface{}) (gqlmodel.MoveInfoboxFieldInput, error) { + var it gqlmodel.MoveInfoboxFieldInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "infoboxFieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("infoboxFieldId")) + it.InfoboxFieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalNInt2int(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputMoveLayerInput(ctx context.Context, obj interface{}) (gqlmodel.MoveLayerInput, error) { + var it gqlmodel.MoveLayerInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "destLayerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("destLayerId")) + it.DestLayerID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + 
return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputMovePropertyItemInput(ctx context.Context, obj interface{}) (gqlmodel.MovePropertyItemInput, error) { + var it gqlmodel.MovePropertyItemInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaGroupId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalNInt2int(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputPagination(ctx context.Context, obj interface{}) (gqlmodel.Pagination, error) { + var it gqlmodel.Pagination + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "first": + var err 
error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + it.First, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + case "last": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + it.Last, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + case "after": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + it.After, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, v) + if err != nil { + return it, err + } + case "before": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + it.Before, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputPublishProjectInput(ctx context.Context, obj interface{}) (gqlmodel.PublishProjectInput, error) { + var it gqlmodel.PublishProjectInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "projectId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) + it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "alias": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("alias")) + it.Alias, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "status": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("status")) + it.Status, err = 
ec.unmarshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPublishmentStatus(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveAssetInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveAssetInput, error) { + var it gqlmodel.RemoveAssetInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "assetId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("assetId")) + it.AssetID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveClusterInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveClusterInput, error) { + var it gqlmodel.RemoveClusterInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "clusterId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("clusterId")) + it.ClusterID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveDatasetSchemaInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveDatasetSchemaInput, error) { + var it gqlmodel.RemoveDatasetSchemaInput + asMap := map[string]interface{}{} + 
for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "schemaId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaId")) + it.SchemaID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "force": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("force")) + it.Force, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveInfoboxFieldInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveInfoboxFieldInput, error) { + var it gqlmodel.RemoveInfoboxFieldInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "infoboxFieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("infoboxFieldId")) + it.InfoboxFieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveInfoboxInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveInfoboxInput, error) { + var it gqlmodel.RemoveInfoboxInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveLayerInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveLayerInput, error) { + var it gqlmodel.RemoveLayerInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveMemberFromTeamInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveMemberFromTeamInput, error) { + var it gqlmodel.RemoveMemberFromTeamInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "teamId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "userId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) + it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveMyAuthInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveMyAuthInput, error) { + var it gqlmodel.RemoveMyAuthInput + asMap := 
map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "auth": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("auth")) + it.Auth, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemovePropertyFieldInput(ctx context.Context, obj interface{}) (gqlmodel.RemovePropertyFieldInput, error) { + var it gqlmodel.RemovePropertyFieldInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaGroupId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "fieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) + it.FieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemovePropertyItemInput(ctx context.Context, obj interface{}) 
(gqlmodel.RemovePropertyItemInput, error) { + var it gqlmodel.RemovePropertyItemInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaGroupId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveTagInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveTagInput, error) { + var it gqlmodel.RemoveTagInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "tagID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tagID")) + it.TagID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveWidgetInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveWidgetInput, error) { + var it gqlmodel.RemoveWidgetInput + asMap := map[string]interface{}{} + for k, v := range 
obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "widgetId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("widgetId")) + it.WidgetID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputSignupInput(ctx context.Context, obj interface{}) (gqlmodel.SignupInput, error) { + var it gqlmodel.SignupInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "lang": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + it.Lang, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, v) + if err != nil { + return it, err + } + case "theme": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("theme")) + it.Theme, err = ec.unmarshalOTheme2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx, v) + if err != nil { + return it, err + } + case "userId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) + it.UserID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "teamId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = 
ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "secret": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("secret")) + it.Secret, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputSyncDatasetInput(ctx context.Context, obj interface{}) (gqlmodel.SyncDatasetInput, error) { + var it gqlmodel.SyncDatasetInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "url": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("url")) + it.URL, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUninstallPluginInput(ctx context.Context, obj interface{}) (gqlmodel.UninstallPluginInput, error) { + var it gqlmodel.UninstallPluginInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "pluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) + it.PluginID, err = 
ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUnlinkPropertyValueInput(ctx context.Context, obj interface{}) (gqlmodel.UnlinkPropertyValueInput, error) { + var it gqlmodel.UnlinkPropertyValueInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaGroupId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "fieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) + it.FieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdateClusterInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateClusterInput, error) { + var it gqlmodel.UpdateClusterInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, 
v := range asMap { + switch k { + case "clusterId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("clusterId")) + it.ClusterID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdateDatasetSchemaInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateDatasetSchemaInput, error) { + var it gqlmodel.UpdateDatasetSchemaInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "schemaId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaId")) + it.SchemaID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec 
*executionContext) unmarshalInputUpdateLayerInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateLayerInput, error) { + var it gqlmodel.UpdateLayerInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "visible": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("visible")) + it.Visible, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdateMeInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateMeInput, error) { + var it gqlmodel.UpdateMeInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "email": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("email")) + it.Email, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "lang": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + it.Lang, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, v) + if err != nil { + return it, err + } + case "theme": + var err 
error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("theme")) + it.Theme, err = ec.unmarshalOTheme2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx, v) + if err != nil { + return it, err + } + case "password": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("password")) + it.Password, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "passwordConfirmation": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("passwordConfirmation")) + it.PasswordConfirmation, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdateMemberOfTeamInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateMemberOfTeamInput, error) { + var it gqlmodel.UpdateMemberOfTeamInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "teamId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "userId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) + it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "role": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("role")) + it.Role, err = ec.unmarshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRole(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) 
unmarshalInputUpdateProjectInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateProjectInput, error) { + var it gqlmodel.UpdateProjectInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "projectId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) + it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "description": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("description")) + it.Description, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "archived": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("archived")) + it.Archived, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + case "isBasicAuthActive": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("isBasicAuthActive")) + it.IsBasicAuthActive, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + case "basicAuthUsername": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("basicAuthUsername")) + it.BasicAuthUsername, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "basicAuthPassword": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("basicAuthPassword")) + it.BasicAuthPassword, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "alias": + var err error + + ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("alias")) + it.Alias, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "imageUrl": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("imageUrl")) + it.ImageURL, err = ec.unmarshalOURL2แš–netแš‹urlแšURL(ctx, v) + if err != nil { + return it, err + } + case "publicTitle": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("publicTitle")) + it.PublicTitle, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "publicDescription": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("publicDescription")) + it.PublicDescription, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "publicImage": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("publicImage")) + it.PublicImage, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "publicNoIndex": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("publicNoIndex")) + it.PublicNoIndex, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + case "deleteImageUrl": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("deleteImageUrl")) + it.DeleteImageURL, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + case "deletePublicImage": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("deletePublicImage")) + it.DeletePublicImage, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdatePropertyItemInput(ctx context.Context, obj interface{}) (gqlmodel.UpdatePropertyItemInput, error) { + var it gqlmodel.UpdatePropertyItemInput + asMap := map[string]interface{}{} + for k, v := range 
obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaGroupId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "operations": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("operations")) + it.Operations, err = ec.unmarshalNUpdatePropertyItemOperationInput2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyItemOperationInputแš„(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdatePropertyItemOperationInput(ctx context.Context, obj interface{}) (gqlmodel.UpdatePropertyItemOperationInput, error) { + var it gqlmodel.UpdatePropertyItemOperationInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "operation": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("operation")) + it.Operation, err = ec.unmarshalNListOperation2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšListOperation(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != 
nil { + return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + case "nameFieldValue": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("nameFieldValue")) + it.NameFieldValue, err = ec.unmarshalOAny2interface(ctx, v) + if err != nil { + return it, err + } + case "nameFieldType": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("nameFieldType")) + it.NameFieldType, err = ec.unmarshalOValueType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdatePropertyValueInput(ctx context.Context, obj interface{}) (gqlmodel.UpdatePropertyValueInput, error) { + var it gqlmodel.UpdatePropertyValueInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaGroupId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return 
it, err + } + case "fieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) + it.FieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "value": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("value")) + it.Value, err = ec.unmarshalOAny2interface(ctx, v) + if err != nil { + return it, err + } + case "type": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("type")) + it.Type, err = ec.unmarshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdateTagInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateTagInput, error) { + var it gqlmodel.UpdateTagInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "tagId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tagId")) + it.TagID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "label": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("label")) + it.Label, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdateTeamInput(ctx context.Context, obj 
interface{}) (gqlmodel.UpdateTeamInput, error) { + var it gqlmodel.UpdateTeamInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "teamId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdateWidgetAlignSystemInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateWidgetAlignSystemInput, error) { + var it gqlmodel.UpdateWidgetAlignSystemInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "location": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("location")) + it.Location, err = ec.unmarshalNWidgetLocationInput2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLocationInput(ctx, v) + if err != nil { + return it, err + } + case "align": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("align")) + it.Align, err = ec.unmarshalOWidgetAreaAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaAlign(ctx, v) + if err != nil { + return it, err + } + } 
+ } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdateWidgetInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateWidgetInput, error) { + var it gqlmodel.UpdateWidgetInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "widgetId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("widgetId")) + it.WidgetID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "enabled": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("enabled")) + it.Enabled, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + case "location": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("location")) + it.Location, err = ec.unmarshalOWidgetLocationInput2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLocationInput(ctx, v) + if err != nil { + return it, err + } + case "extended": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extended")) + it.Extended, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpgradePluginInput(ctx context.Context, obj interface{}) 
(gqlmodel.UpgradePluginInput, error) { + var it gqlmodel.UpgradePluginInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "pluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) + it.PluginID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "toPluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("toPluginId")) + it.ToPluginID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUploadFileToPropertyInput(ctx context.Context, obj interface{}) (gqlmodel.UploadFileToPropertyInput, error) { + var it gqlmodel.UploadFileToPropertyInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaGroupId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = 
ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "fieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) + it.FieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "file": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("file")) + it.File, err = ec.unmarshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUploadPluginInput(ctx context.Context, obj interface{}) (gqlmodel.UploadPluginInput, error) { + var it gqlmodel.UploadPluginInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) + if err != nil { + return it, err + } + case "file": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("file")) + it.File, err = ec.unmarshalOUpload2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx, v) + if err != nil { + return it, err + } + case "url": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("url")) + it.URL, err = ec.unmarshalOURL2แš–netแš‹urlแšURL(ctx, v) + 
if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputWidgetLocationInput(ctx context.Context, obj interface{}) (gqlmodel.WidgetLocationInput, error) { + var it gqlmodel.WidgetLocationInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "zone": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("zone")) + it.Zone, err = ec.unmarshalNWidgetZoneType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZoneType(ctx, v) + if err != nil { + return it, err + } + case "section": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("section")) + it.Section, err = ec.unmarshalNWidgetSectionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSectionType(ctx, v) + if err != nil { + return it, err + } + case "area": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("area")) + it.Area, err = ec.unmarshalNWidgetAreaType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaType(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +// endregion **************************** input.gotpl ***************************** + +// region ************************** interface.gotpl *************************** + +func (ec *executionContext) _Layer(ctx context.Context, sel ast.SelectionSet, obj gqlmodel.Layer) graphql.Marshaler { + switch obj := (obj).(type) { + case nil: + return graphql.Null + case gqlmodel.LayerItem: + return ec._LayerItem(ctx, sel, &obj) + case *gqlmodel.LayerItem: + if obj == nil { + return graphql.Null + } + return ec._LayerItem(ctx, sel, obj) + case gqlmodel.LayerGroup: + return ec._LayerGroup(ctx, sel, &obj) + case *gqlmodel.LayerGroup: + if obj == nil { + return graphql.Null + 
} + return ec._LayerGroup(ctx, sel, obj) + default: + panic(fmt.Errorf("unexpected type %T", obj)) + } +} + +func (ec *executionContext) _LayerTag(ctx context.Context, sel ast.SelectionSet, obj gqlmodel.LayerTag) graphql.Marshaler { + switch obj := (obj).(type) { + case nil: + return graphql.Null + case gqlmodel.LayerTagItem: + return ec._LayerTagItem(ctx, sel, &obj) + case *gqlmodel.LayerTagItem: + if obj == nil { + return graphql.Null + } + return ec._LayerTagItem(ctx, sel, obj) + case gqlmodel.LayerTagGroup: + return ec._LayerTagGroup(ctx, sel, &obj) + case *gqlmodel.LayerTagGroup: + if obj == nil { + return graphql.Null + } + return ec._LayerTagGroup(ctx, sel, obj) + default: + panic(fmt.Errorf("unexpected type %T", obj)) + } +} + +func (ec *executionContext) _Node(ctx context.Context, sel ast.SelectionSet, obj gqlmodel.Node) graphql.Marshaler { + switch obj := (obj).(type) { + case nil: + return graphql.Null + case gqlmodel.Asset: + return ec._Asset(ctx, sel, &obj) + case *gqlmodel.Asset: + if obj == nil { + return graphql.Null + } + return ec._Asset(ctx, sel, obj) + case gqlmodel.User: + return ec._User(ctx, sel, &obj) + case *gqlmodel.User: + if obj == nil { + return graphql.Null + } + return ec._User(ctx, sel, obj) + case gqlmodel.Team: + return ec._Team(ctx, sel, &obj) + case *gqlmodel.Team: + if obj == nil { + return graphql.Null + } + return ec._Team(ctx, sel, obj) + case gqlmodel.Project: + return ec._Project(ctx, sel, &obj) + case *gqlmodel.Project: + if obj == nil { + return graphql.Null + } + return ec._Project(ctx, sel, obj) + case gqlmodel.Scene: + return ec._Scene(ctx, sel, &obj) + case *gqlmodel.Scene: + if obj == nil { + return graphql.Null + } + return ec._Scene(ctx, sel, obj) + case gqlmodel.Property: + return ec._Property(ctx, sel, &obj) + case *gqlmodel.Property: + if obj == nil { + return graphql.Null + } + return ec._Property(ctx, sel, obj) + case gqlmodel.DatasetSchema: + return ec._DatasetSchema(ctx, sel, &obj) + case 
*gqlmodel.DatasetSchema: + if obj == nil { + return graphql.Null + } + return ec._DatasetSchema(ctx, sel, obj) + case gqlmodel.DatasetSchemaField: + return ec._DatasetSchemaField(ctx, sel, &obj) + case *gqlmodel.DatasetSchemaField: + if obj == nil { + return graphql.Null + } + return ec._DatasetSchemaField(ctx, sel, obj) + case gqlmodel.Dataset: + return ec._Dataset(ctx, sel, &obj) + case *gqlmodel.Dataset: + if obj == nil { + return graphql.Null + } + return ec._Dataset(ctx, sel, obj) + default: + panic(fmt.Errorf("unexpected type %T", obj)) + } +} + +func (ec *executionContext) _PropertyItem(ctx context.Context, sel ast.SelectionSet, obj gqlmodel.PropertyItem) graphql.Marshaler { + switch obj := (obj).(type) { + case nil: + return graphql.Null + case gqlmodel.PropertyGroup: + return ec._PropertyGroup(ctx, sel, &obj) + case *gqlmodel.PropertyGroup: + if obj == nil { + return graphql.Null + } + return ec._PropertyGroup(ctx, sel, obj) + case gqlmodel.PropertyGroupList: + return ec._PropertyGroupList(ctx, sel, &obj) + case *gqlmodel.PropertyGroupList: + if obj == nil { + return graphql.Null + } + return ec._PropertyGroupList(ctx, sel, obj) + default: + panic(fmt.Errorf("unexpected type %T", obj)) + } +} + +func (ec *executionContext) _Tag(ctx context.Context, sel ast.SelectionSet, obj gqlmodel.Tag) graphql.Marshaler { + switch obj := (obj).(type) { + case nil: + return graphql.Null + case gqlmodel.TagItem: + return ec._TagItem(ctx, sel, &obj) + case *gqlmodel.TagItem: + if obj == nil { + return graphql.Null + } + return ec._TagItem(ctx, sel, obj) + case gqlmodel.TagGroup: + return ec._TagGroup(ctx, sel, &obj) + case *gqlmodel.TagGroup: + if obj == nil { + return graphql.Null + } + return ec._TagGroup(ctx, sel, obj) + default: + panic(fmt.Errorf("unexpected type %T", obj)) + } +} + +// endregion ************************** interface.gotpl *************************** + +// region **************************** object.gotpl **************************** + +var 
addClusterPayloadImplementors = []string{"AddClusterPayload"} + +func (ec *executionContext) _AddClusterPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddClusterPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addClusterPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AddClusterPayload") + case "scene": + + out.Values[i] = ec._AddClusterPayload_scene(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "cluster": + + out.Values[i] = ec._AddClusterPayload_cluster(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var addDatasetSchemaPayloadImplementors = []string{"AddDatasetSchemaPayload"} + +func (ec *executionContext) _AddDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddDatasetSchemaPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addDatasetSchemaPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AddDatasetSchemaPayload") + case "datasetSchema": + + out.Values[i] = ec._AddDatasetSchemaPayload_datasetSchema(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var addDynamicDatasetPayloadImplementors = []string{"AddDynamicDatasetPayload"} + +func (ec *executionContext) _AddDynamicDatasetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddDynamicDatasetPayload) graphql.Marshaler { + fields := 
graphql.CollectFields(ec.OperationContext, sel, addDynamicDatasetPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AddDynamicDatasetPayload") + case "datasetSchema": + + out.Values[i] = ec._AddDynamicDatasetPayload_datasetSchema(ctx, field, obj) + + case "dataset": + + out.Values[i] = ec._AddDynamicDatasetPayload_dataset(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var addDynamicDatasetSchemaPayloadImplementors = []string{"AddDynamicDatasetSchemaPayload"} + +func (ec *executionContext) _AddDynamicDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddDynamicDatasetSchemaPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addDynamicDatasetSchemaPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AddDynamicDatasetSchemaPayload") + case "datasetSchema": + + out.Values[i] = ec._AddDynamicDatasetSchemaPayload_datasetSchema(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var addInfoboxFieldPayloadImplementors = []string{"AddInfoboxFieldPayload"} + +func (ec *executionContext) _AddInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddInfoboxFieldPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addInfoboxFieldPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = 
graphql.MarshalString("AddInfoboxFieldPayload") + case "infoboxField": + + out.Values[i] = ec._AddInfoboxFieldPayload_infoboxField(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "layer": + + out.Values[i] = ec._AddInfoboxFieldPayload_layer(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var addLayerGroupPayloadImplementors = []string{"AddLayerGroupPayload"} + +func (ec *executionContext) _AddLayerGroupPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddLayerGroupPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addLayerGroupPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AddLayerGroupPayload") + case "layer": + + out.Values[i] = ec._AddLayerGroupPayload_layer(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "parentLayer": + + out.Values[i] = ec._AddLayerGroupPayload_parentLayer(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "index": + + out.Values[i] = ec._AddLayerGroupPayload_index(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var addLayerItemPayloadImplementors = []string{"AddLayerItemPayload"} + +func (ec *executionContext) _AddLayerItemPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddLayerItemPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addLayerItemPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": 
+ out.Values[i] = graphql.MarshalString("AddLayerItemPayload") + case "layer": + + out.Values[i] = ec._AddLayerItemPayload_layer(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "parentLayer": + + out.Values[i] = ec._AddLayerItemPayload_parentLayer(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "index": + + out.Values[i] = ec._AddLayerItemPayload_index(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var addMemberToTeamPayloadImplementors = []string{"AddMemberToTeamPayload"} + +func (ec *executionContext) _AddMemberToTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddMemberToTeamPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addMemberToTeamPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AddMemberToTeamPayload") + case "team": + + out.Values[i] = ec._AddMemberToTeamPayload_team(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var addWidgetPayloadImplementors = []string{"AddWidgetPayload"} + +func (ec *executionContext) _AddWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddWidgetPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addWidgetPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AddWidgetPayload") + case "scene": + + out.Values[i] = ec._AddWidgetPayload_scene(ctx, field, obj) + + if 
out.Values[i] == graphql.Null { + invalids++ + } + case "sceneWidget": + + out.Values[i] = ec._AddWidgetPayload_sceneWidget(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var assetImplementors = []string{"Asset", "Node"} + +func (ec *executionContext) _Asset(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Asset) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, assetImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Asset") + case "id": + + out.Values[i] = ec._Asset_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "createdAt": + + out.Values[i] = ec._Asset_createdAt(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "teamId": + + out.Values[i] = ec._Asset_teamId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + + out.Values[i] = ec._Asset_name(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "size": + + out.Values[i] = ec._Asset_size(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "url": + + out.Values[i] = ec._Asset_url(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "contentType": + + out.Values[i] = ec._Asset_contentType(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "team": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } 
+ }() + res = ec._Asset_team(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var assetConnectionImplementors = []string{"AssetConnection"} + +func (ec *executionContext) _AssetConnection(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AssetConnection) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, assetConnectionImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AssetConnection") + case "edges": + + out.Values[i] = ec._AssetConnection_edges(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "nodes": + + out.Values[i] = ec._AssetConnection_nodes(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "pageInfo": + + out.Values[i] = ec._AssetConnection_pageInfo(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "totalCount": + + out.Values[i] = ec._AssetConnection_totalCount(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var assetEdgeImplementors = []string{"AssetEdge"} + +func (ec *executionContext) _AssetEdge(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AssetEdge) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, assetEdgeImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AssetEdge") + case "cursor": + + out.Values[i] = 
ec._AssetEdge_cursor(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "node": + + out.Values[i] = ec._AssetEdge_node(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var attachTagItemToGroupPayloadImplementors = []string{"AttachTagItemToGroupPayload"} + +func (ec *executionContext) _AttachTagItemToGroupPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AttachTagItemToGroupPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, attachTagItemToGroupPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AttachTagItemToGroupPayload") + case "tag": + + out.Values[i] = ec._AttachTagItemToGroupPayload_tag(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var attachTagToLayerPayloadImplementors = []string{"AttachTagToLayerPayload"} + +func (ec *executionContext) _AttachTagToLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AttachTagToLayerPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, attachTagToLayerPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AttachTagToLayerPayload") + case "layer": + + out.Values[i] = ec._AttachTagToLayerPayload_layer(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + 
return out +} + +var cameraImplementors = []string{"Camera"} + +func (ec *executionContext) _Camera(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Camera) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, cameraImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Camera") + case "lat": + + out.Values[i] = ec._Camera_lat(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "lng": + + out.Values[i] = ec._Camera_lng(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "altitude": + + out.Values[i] = ec._Camera_altitude(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "heading": + + out.Values[i] = ec._Camera_heading(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "pitch": + + out.Values[i] = ec._Camera_pitch(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "roll": + + out.Values[i] = ec._Camera_roll(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "fov": + + out.Values[i] = ec._Camera_fov(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var clusterImplementors = []string{"Cluster"} + +func (ec *executionContext) _Cluster(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Cluster) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, clusterImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Cluster") + case "id": + + out.Values[i] = ec._Cluster_id(ctx, field, obj) + + if 
out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + + out.Values[i] = ec._Cluster_name(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertyId": + + out.Values[i] = ec._Cluster_propertyId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "property": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Cluster_property(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var createAssetPayloadImplementors = []string{"CreateAssetPayload"} + +func (ec *executionContext) _CreateAssetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateAssetPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, createAssetPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("CreateAssetPayload") + case "asset": + + out.Values[i] = ec._CreateAssetPayload_asset(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var createInfoboxPayloadImplementors = []string{"CreateInfoboxPayload"} + +func (ec *executionContext) _CreateInfoboxPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateInfoboxPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, createInfoboxPayloadImplementors) + out := 
graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("CreateInfoboxPayload") + case "layer": + + out.Values[i] = ec._CreateInfoboxPayload_layer(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var createScenePayloadImplementors = []string{"CreateScenePayload"} + +func (ec *executionContext) _CreateScenePayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateScenePayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, createScenePayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("CreateScenePayload") + case "scene": + + out.Values[i] = ec._CreateScenePayload_scene(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var createTagGroupPayloadImplementors = []string{"CreateTagGroupPayload"} + +func (ec *executionContext) _CreateTagGroupPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateTagGroupPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, createTagGroupPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("CreateTagGroupPayload") + case "tag": + + out.Values[i] = ec._CreateTagGroupPayload_tag(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + 
strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var createTagItemPayloadImplementors = []string{"CreateTagItemPayload"} + +func (ec *executionContext) _CreateTagItemPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateTagItemPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, createTagItemPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("CreateTagItemPayload") + case "tag": + + out.Values[i] = ec._CreateTagItemPayload_tag(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "parent": + + out.Values[i] = ec._CreateTagItemPayload_parent(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var createTeamPayloadImplementors = []string{"CreateTeamPayload"} + +func (ec *executionContext) _CreateTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateTeamPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, createTeamPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("CreateTeamPayload") + case "team": + + out.Values[i] = ec._CreateTeamPayload_team(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var datasetImplementors = []string{"Dataset", "Node"} + +func (ec *executionContext) _Dataset(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Dataset) graphql.Marshaler { + fields := 
graphql.CollectFields(ec.OperationContext, sel, datasetImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Dataset") + case "id": + + out.Values[i] = ec._Dataset_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "source": + + out.Values[i] = ec._Dataset_source(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + + out.Values[i] = ec._Dataset_schemaId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "fields": + + out.Values[i] = ec._Dataset_fields(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schema": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Dataset_schema(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "name": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Dataset_name(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var datasetConnectionImplementors = []string{"DatasetConnection"} + +func (ec *executionContext) _DatasetConnection(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetConnection) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, datasetConnectionImplementors) + out := 
graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DatasetConnection") + case "edges": + + out.Values[i] = ec._DatasetConnection_edges(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "nodes": + + out.Values[i] = ec._DatasetConnection_nodes(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "pageInfo": + + out.Values[i] = ec._DatasetConnection_pageInfo(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "totalCount": + + out.Values[i] = ec._DatasetConnection_totalCount(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var datasetEdgeImplementors = []string{"DatasetEdge"} + +func (ec *executionContext) _DatasetEdge(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetEdge) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, datasetEdgeImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DatasetEdge") + case "cursor": + + out.Values[i] = ec._DatasetEdge_cursor(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "node": + + out.Values[i] = ec._DatasetEdge_node(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var datasetFieldImplementors = []string{"DatasetField"} + +func (ec *executionContext) _DatasetField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetField) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, 
datasetFieldImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DatasetField") + case "fieldId": + + out.Values[i] = ec._DatasetField_fieldId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + + out.Values[i] = ec._DatasetField_schemaId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "source": + + out.Values[i] = ec._DatasetField_source(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "type": + + out.Values[i] = ec._DatasetField_type(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "value": + + out.Values[i] = ec._DatasetField_value(ctx, field, obj) + + case "schema": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetField_schema(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "field": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetField_field(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "valueRef": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetField_valueRef(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + 
strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var datasetSchemaImplementors = []string{"DatasetSchema", "Node"} + +func (ec *executionContext) _DatasetSchema(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetSchema) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, datasetSchemaImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DatasetSchema") + case "id": + + out.Values[i] = ec._DatasetSchema_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "source": + + out.Values[i] = ec._DatasetSchema_source(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + + out.Values[i] = ec._DatasetSchema_name(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "sceneId": + + out.Values[i] = ec._DatasetSchema_sceneId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "fields": + + out.Values[i] = ec._DatasetSchema_fields(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "totalCount": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetSchema_totalCount(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "representativeFieldId": + + out.Values[i] = ec._DatasetSchema_representativeFieldId(ctx, field, obj) + + case "dynamic": + + out.Values[i] = ec._DatasetSchema_dynamic(ctx, field, obj) + + case "datasets": + 
field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetSchema_datasets(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "scene": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetSchema_scene(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "representativeField": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetSchema_representativeField(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var datasetSchemaConnectionImplementors = []string{"DatasetSchemaConnection"} + +func (ec *executionContext) _DatasetSchemaConnection(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetSchemaConnection) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, datasetSchemaConnectionImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DatasetSchemaConnection") + case "edges": + + out.Values[i] = ec._DatasetSchemaConnection_edges(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "nodes": + + out.Values[i] = 
ec._DatasetSchemaConnection_nodes(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "pageInfo": + + out.Values[i] = ec._DatasetSchemaConnection_pageInfo(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "totalCount": + + out.Values[i] = ec._DatasetSchemaConnection_totalCount(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var datasetSchemaEdgeImplementors = []string{"DatasetSchemaEdge"} + +func (ec *executionContext) _DatasetSchemaEdge(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetSchemaEdge) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, datasetSchemaEdgeImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DatasetSchemaEdge") + case "cursor": + + out.Values[i] = ec._DatasetSchemaEdge_cursor(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "node": + + out.Values[i] = ec._DatasetSchemaEdge_node(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var datasetSchemaFieldImplementors = []string{"DatasetSchemaField", "Node"} + +func (ec *executionContext) _DatasetSchemaField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetSchemaField) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, datasetSchemaFieldImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DatasetSchemaField") + case "id": + + out.Values[i] = 
ec._DatasetSchemaField_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "source": + + out.Values[i] = ec._DatasetSchemaField_source(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + + out.Values[i] = ec._DatasetSchemaField_name(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "type": + + out.Values[i] = ec._DatasetSchemaField_type(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + + out.Values[i] = ec._DatasetSchemaField_schemaId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "refId": + + out.Values[i] = ec._DatasetSchemaField_refId(ctx, field, obj) + + case "schema": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetSchemaField_schema(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "ref": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetSchemaField_ref(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var deleteMePayloadImplementors = []string{"DeleteMePayload"} + +func (ec *executionContext) _DeleteMePayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DeleteMePayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, deleteMePayloadImplementors) + out := 
graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DeleteMePayload") + case "userId": + + out.Values[i] = ec._DeleteMePayload_userId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var deleteProjectPayloadImplementors = []string{"DeleteProjectPayload"} + +func (ec *executionContext) _DeleteProjectPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DeleteProjectPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, deleteProjectPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DeleteProjectPayload") + case "projectId": + + out.Values[i] = ec._DeleteProjectPayload_projectId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var deleteTeamPayloadImplementors = []string{"DeleteTeamPayload"} + +func (ec *executionContext) _DeleteTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DeleteTeamPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, deleteTeamPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DeleteTeamPayload") + case "teamId": + + out.Values[i] = ec._DeleteTeamPayload_teamId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) 
+ } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var detachTagFromLayerPayloadImplementors = []string{"DetachTagFromLayerPayload"} + +func (ec *executionContext) _DetachTagFromLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DetachTagFromLayerPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, detachTagFromLayerPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DetachTagFromLayerPayload") + case "layer": + + out.Values[i] = ec._DetachTagFromLayerPayload_layer(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var detachTagItemFromGroupPayloadImplementors = []string{"DetachTagItemFromGroupPayload"} + +func (ec *executionContext) _DetachTagItemFromGroupPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DetachTagItemFromGroupPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, detachTagItemFromGroupPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DetachTagItemFromGroupPayload") + case "tag": + + out.Values[i] = ec._DetachTagItemFromGroupPayload_tag(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var importDatasetPayloadImplementors = []string{"ImportDatasetPayload"} + +func (ec *executionContext) _ImportDatasetPayload(ctx context.Context, sel ast.SelectionSet, obj 
*gqlmodel.ImportDatasetPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, importDatasetPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("ImportDatasetPayload") + case "datasetSchema": + + out.Values[i] = ec._ImportDatasetPayload_datasetSchema(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var importLayerPayloadImplementors = []string{"ImportLayerPayload"} + +func (ec *executionContext) _ImportLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ImportLayerPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, importLayerPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("ImportLayerPayload") + case "layers": + + out.Values[i] = ec._ImportLayerPayload_layers(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "parentLayer": + + out.Values[i] = ec._ImportLayerPayload_parentLayer(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var infoboxImplementors = []string{"Infobox"} + +func (ec *executionContext) _Infobox(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Infobox) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, infoboxImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + 
out.Values[i] = graphql.MarshalString("Infobox") + case "sceneId": + + out.Values[i] = ec._Infobox_sceneId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "layerId": + + out.Values[i] = ec._Infobox_layerId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertyId": + + out.Values[i] = ec._Infobox_propertyId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "fields": + + out.Values[i] = ec._Infobox_fields(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "linkedDatasetId": + + out.Values[i] = ec._Infobox_linkedDatasetId(ctx, field, obj) + + case "layer": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Infobox_layer(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "property": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Infobox_property(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "linkedDataset": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Infobox_linkedDataset(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "merged": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != 
nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Infobox_merged(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "scene": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Infobox_scene(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var infoboxFieldImplementors = []string{"InfoboxField"} + +func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.InfoboxField) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, infoboxFieldImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("InfoboxField") + case "id": + + out.Values[i] = ec._InfoboxField_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "sceneId": + + out.Values[i] = ec._InfoboxField_sceneId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "layerId": + + out.Values[i] = ec._InfoboxField_layerId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertyId": + + out.Values[i] = ec._InfoboxField_propertyId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "pluginId": + + out.Values[i] = ec._InfoboxField_pluginId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "extensionId": + + out.Values[i] = 
ec._InfoboxField_extensionId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "linkedDatasetId": + + out.Values[i] = ec._InfoboxField_linkedDatasetId(ctx, field, obj) + + case "layer": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_layer(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "infobox": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_infobox(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "property": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_property(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "plugin": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_plugin(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "extension": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_extension(ctx, field, obj) + return 
res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "linkedDataset": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_linkedDataset(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "merged": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_merged(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "scene": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_scene(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "scenePlugin": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_scenePlugin(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var installPluginPayloadImplementors = []string{"InstallPluginPayload"} + +func (ec *executionContext) _InstallPluginPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.InstallPluginPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, installPluginPayloadImplementors) + out := 
graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("InstallPluginPayload") + case "scene": + + out.Values[i] = ec._InstallPluginPayload_scene(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "scenePlugin": + + out.Values[i] = ec._InstallPluginPayload_scenePlugin(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var latLngImplementors = []string{"LatLng"} + +func (ec *executionContext) _LatLng(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LatLng) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, latLngImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("LatLng") + case "lat": + + out.Values[i] = ec._LatLng_lat(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "lng": + + out.Values[i] = ec._LatLng_lng(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var latLngHeightImplementors = []string{"LatLngHeight"} + +func (ec *executionContext) _LatLngHeight(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LatLngHeight) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, latLngHeightImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("LatLngHeight") + case "lat": + + out.Values[i] = ec._LatLngHeight_lat(ctx, 
field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "lng": + + out.Values[i] = ec._LatLngHeight_lng(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "height": + + out.Values[i] = ec._LatLngHeight_height(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var layerGroupImplementors = []string{"LayerGroup", "Layer"} + +func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LayerGroup) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, layerGroupImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("LayerGroup") + case "id": + + out.Values[i] = ec._LayerGroup_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "sceneId": + + out.Values[i] = ec._LayerGroup_sceneId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + + out.Values[i] = ec._LayerGroup_name(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "isVisible": + + out.Values[i] = ec._LayerGroup_isVisible(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertyId": + + out.Values[i] = ec._LayerGroup_propertyId(ctx, field, obj) + + case "pluginId": + + out.Values[i] = ec._LayerGroup_pluginId(ctx, field, obj) + + case "extensionId": + + out.Values[i] = ec._LayerGroup_extensionId(ctx, field, obj) + + case "infobox": + + out.Values[i] = ec._LayerGroup_infobox(ctx, field, obj) + + case "parentId": + + out.Values[i] = ec._LayerGroup_parentId(ctx, field, obj) + + case 
"linkedDatasetSchemaId": + + out.Values[i] = ec._LayerGroup_linkedDatasetSchemaId(ctx, field, obj) + + case "root": + + out.Values[i] = ec._LayerGroup_root(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "layerIds": + + out.Values[i] = ec._LayerGroup_layerIds(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "tags": + + out.Values[i] = ec._LayerGroup_tags(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "parent": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerGroup_parent(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "property": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerGroup_property(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "plugin": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerGroup_plugin(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "extension": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerGroup_extension(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "linkedDatasetSchema": + field := field + + innerFunc 
:= func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerGroup_linkedDatasetSchema(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "layers": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerGroup_layers(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "scene": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerGroup_scene(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "scenePlugin": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerGroup_scenePlugin(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var layerItemImplementors = []string{"LayerItem", "Layer"} + +func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LayerItem) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, layerItemImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = 
graphql.MarshalString("LayerItem") + case "id": + + out.Values[i] = ec._LayerItem_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "sceneId": + + out.Values[i] = ec._LayerItem_sceneId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + + out.Values[i] = ec._LayerItem_name(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "isVisible": + + out.Values[i] = ec._LayerItem_isVisible(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertyId": + + out.Values[i] = ec._LayerItem_propertyId(ctx, field, obj) + + case "pluginId": + + out.Values[i] = ec._LayerItem_pluginId(ctx, field, obj) + + case "extensionId": + + out.Values[i] = ec._LayerItem_extensionId(ctx, field, obj) + + case "infobox": + + out.Values[i] = ec._LayerItem_infobox(ctx, field, obj) + + case "parentId": + + out.Values[i] = ec._LayerItem_parentId(ctx, field, obj) + + case "linkedDatasetId": + + out.Values[i] = ec._LayerItem_linkedDatasetId(ctx, field, obj) + + case "tags": + + out.Values[i] = ec._LayerItem_tags(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "parent": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_parent(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "property": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_property(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "plugin": + 
field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_plugin(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "extension": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_extension(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "linkedDataset": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_linkedDataset(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "merged": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_merged(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "scene": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_scene(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "scenePlugin": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_scenePlugin(ctx, field, obj) + return res + } + + 
out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	// Any required (non-null) field that resolved to null invalidates the
	// whole object, per GraphQL null-propagation rules.
	if invalids > 0 {
		return graphql.Null
	}
	return out
}

// NOTE(review): this whole region is gqlgen-generated executor code —
// regenerate with gqlgen rather than hand-editing.

var layerTagGroupImplementors = []string{"LayerTagGroup", "LayerTag"}

// _LayerTagGroup marshals a gqlmodel.LayerTagGroup selection set. Plain
// fields resolve inline; "tag" is resolved concurrently via out.Concurrently
// with a recover guard so a panicking resolver is reported, not fatal.
func (ec *executionContext) _LayerTagGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LayerTagGroup) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, layerTagGroupImplementors)
	out := graphql.NewFieldSet(fields)
	// invalids counts required fields that came back null; updated atomically
	// because concurrent field goroutines may also increment it.
	var invalids uint32
	for i, field := range fields {
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("LayerTagGroup")
		case "tagId":
			out.Values[i] = ec._LayerTagGroup_tagId(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "children":
			out.Values[i] = ec._LayerTagGroup_children(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "tag":
			field := field // shadow loop variable for the closure (pre-Go 1.22 capture semantics)
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._LayerTagGroup_tag(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	if invalids > 0 {
		return graphql.Null
	}
	return out
}

var layerTagItemImplementors = []string{"LayerTagItem", "LayerTag"}

// _LayerTagItem marshals a gqlmodel.LayerTagItem selection set; "tag" is
// resolved concurrently, everything else inline.
func (ec *executionContext) _LayerTagItem(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LayerTagItem) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, layerTagItemImplementors)
	out := graphql.NewFieldSet(fields)
	var invalids uint32
	for i, field := range fields {
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("LayerTagItem")
		case "tagId":
			out.Values[i] = ec._LayerTagItem_tagId(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "tag":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._LayerTagItem_tag(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	if invalids > 0 {
		return graphql.Null
	}
	return out
}

var meImplementors = []string{"Me"}

// _Me marshals the current-user (gqlmodel.Me) selection set. Scalars resolve
// inline; "teams" and "myTeam" are resolved concurrently and are themselves
// required, so a null result from them also invalidates the object.
func (ec *executionContext) _Me(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Me) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, meImplementors)
	out := graphql.NewFieldSet(fields)
	var invalids uint32
	for i, field := range fields {
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("Me")
		case "id":
			out.Values[i] = ec._Me_id(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "name":
			out.Values[i] = ec._Me_name(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "email":
			out.Values[i] = ec._Me_email(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "lang":
			out.Values[i] = ec._Me_lang(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "theme":
			out.Values[i] = ec._Me_theme(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "myTeamId":
			out.Values[i] = ec._Me_myTeamId(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "auths":
			out.Values[i] = ec._Me_auths(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "teams":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._Me_teams(ctx, field, obj)
				// required field: a null result invalidates the object
				if res == graphql.Null {
					atomic.AddUint32(&invalids, 1)
				}
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "myTeam":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._Me_myTeam(ctx, field, obj)
				// required field: a null result invalidates the object
				if res == graphql.Null {
					atomic.AddUint32(&invalids, 1)
				}
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	if invalids > 0 {
		return graphql.Null
	}
	return out
}

var mergedInfoboxImplementors = []string{"MergedInfobox"}

// _MergedInfobox marshals a gqlmodel.MergedInfobox selection set. "property"
// is optional (no null check); "scene" is resolved concurrently.
func (ec *executionContext) _MergedInfobox(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MergedInfobox) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, mergedInfoboxImplementors)
	out := graphql.NewFieldSet(fields)
	var invalids uint32
	for i, field := range fields {
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("MergedInfobox")
		case "sceneID":
			out.Values[i] = ec._MergedInfobox_sceneID(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "property":
			out.Values[i] = ec._MergedInfobox_property(ctx, field, obj)
		case "fields":
			out.Values[i] = ec._MergedInfobox_fields(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "scene":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res
graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedInfobox_scene(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	if invalids > 0 {
		return graphql.Null
	}
	return out
}

var mergedInfoboxFieldImplementors = []string{"MergedInfoboxField"}

// _MergedInfoboxField marshals a gqlmodel.MergedInfoboxField selection set.
// ID scalars resolve inline (required); plugin/extension/scene/scenePlugin
// are resolved concurrently with a recover guard. Generated by gqlgen.
func (ec *executionContext) _MergedInfoboxField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MergedInfoboxField) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, mergedInfoboxFieldImplementors)
	out := graphql.NewFieldSet(fields)
	var invalids uint32
	for i, field := range fields {
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("MergedInfoboxField")
		case "originalId":
			out.Values[i] = ec._MergedInfoboxField_originalId(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "sceneID":
			out.Values[i] = ec._MergedInfoboxField_sceneID(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "pluginId":
			out.Values[i] = ec._MergedInfoboxField_pluginId(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "extensionId":
			out.Values[i] = ec._MergedInfoboxField_extensionId(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "property":
			// optional field: null is acceptable, no invalids accounting
			out.Values[i] = ec._MergedInfoboxField_property(ctx, field, obj)
		case "plugin":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedInfoboxField_plugin(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "extension":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedInfoboxField_extension(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "scene":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedInfoboxField_scene(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "scenePlugin":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedInfoboxField_scenePlugin(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	if invalids > 0 {
		return graphql.Null
	}
	return out
}

var mergedLayerImplementors = []string{"MergedLayer"}

// _MergedLayer marshals a gqlmodel.MergedLayer selection set. parentId,
// property and infobox are optional; original/parent/scene resolve
// concurrently.
func (ec *executionContext) _MergedLayer(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MergedLayer) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, mergedLayerImplementors)
	out := graphql.NewFieldSet(fields)
	var invalids uint32
	for i, field := range fields {
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("MergedLayer")
		case "originalId":
			out.Values[i] = ec._MergedLayer_originalId(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "parentId":
			out.Values[i] = ec._MergedLayer_parentId(ctx, field, obj)
		case "sceneID":
			out.Values[i] = ec._MergedLayer_sceneID(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "property":
			out.Values[i] = ec._MergedLayer_property(ctx, field, obj)
		case "infobox":
			out.Values[i] = ec._MergedLayer_infobox(ctx, field, obj)
		case "original":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedLayer_original(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "parent":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedLayer_parent(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "scene":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedLayer_scene(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	if invalids > 0 {
		return graphql.Null
	}
	return out
}

var mergedPropertyImplementors = []string{"MergedProperty"}

// _MergedProperty marshals a gqlmodel.MergedProperty selection set. All ID
// fields are optional; only the concurrently-resolved "groups" field is
// required.
func (ec *executionContext) _MergedProperty(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MergedProperty) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, mergedPropertyImplementors)
	out := graphql.NewFieldSet(fields)
	var invalids uint32
	for i, field := range fields {
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("MergedProperty")
		case "originalId":
			out.Values[i] =
ec._MergedProperty_originalId(ctx, field, obj)
		case "parentId":
			out.Values[i] = ec._MergedProperty_parentId(ctx, field, obj)
		case "schemaId":
			out.Values[i] = ec._MergedProperty_schemaId(ctx, field, obj)
		case "linkedDatasetId":
			out.Values[i] = ec._MergedProperty_linkedDatasetId(ctx, field, obj)
		case "original":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedProperty_original(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "parent":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedProperty_parent(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "schema":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedProperty_schema(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "linkedDataset":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedProperty_linkedDataset(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "groups":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedProperty_groups(ctx, field, obj)
				// required field: a null result invalidates the object
				if res == graphql.Null {
					atomic.AddUint32(&invalids, 1)
				}
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	if invalids > 0 {
		return graphql.Null
	}
	return out
}

var mergedPropertyFieldImplementors = []string{"MergedPropertyField"}

// _MergedPropertyField marshals a gqlmodel.MergedPropertyField selection set.
// value and links are optional; schema/field/actualValue resolve concurrently.
// Generated by gqlgen.
func (ec *executionContext) _MergedPropertyField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MergedPropertyField) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, mergedPropertyFieldImplementors)
	out := graphql.NewFieldSet(fields)
	var invalids uint32
	for i, field := range fields {
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("MergedPropertyField")
		case "schemaId":
			out.Values[i] = ec._MergedPropertyField_schemaId(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "fieldId":
			out.Values[i] = ec._MergedPropertyField_fieldId(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "value":
			out.Values[i] = ec._MergedPropertyField_value(ctx, field, obj)
		case "type":
			out.Values[i] = ec._MergedPropertyField_type(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "links":
			out.Values[i] = ec._MergedPropertyField_links(ctx, field, obj)
		case "overridden":
			out.Values[i] = ec._MergedPropertyField_overridden(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "schema":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedPropertyField_schema(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "field":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedPropertyField_field(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "actualValue":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedPropertyField_actualValue(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	if invalids > 0 {
		return graphql.Null
	}
	return out
}

var mergedPropertyGroupImplementors = []string{"MergedPropertyGroup"}

// _MergedPropertyGroup marshals a gqlmodel.MergedPropertyGroup selection set.
// Most ID fields are optional; schemaGroupId/fields/groups are required.
func (ec *executionContext) _MergedPropertyGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MergedPropertyGroup) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, mergedPropertyGroupImplementors)
	out := graphql.NewFieldSet(fields)
	var invalids uint32
	for i, field := range fields {
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("MergedPropertyGroup")
		case "originalPropertyId":
			out.Values[i] = ec._MergedPropertyGroup_originalPropertyId(ctx, field, obj)
		case "parentPropertyId":
			out.Values[i] = ec._MergedPropertyGroup_parentPropertyId(ctx, field, obj)
		case "originalId":
			out.Values[i] = ec._MergedPropertyGroup_originalId(ctx, field, obj)
		case "parentId":
			out.Values[i] = ec._MergedPropertyGroup_parentId(ctx, field, obj)
		case "schemaGroupId":
			out.Values[i] = ec._MergedPropertyGroup_schemaGroupId(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "schemaId":
			out.Values[i] =
ec._MergedPropertyGroup_schemaId(ctx, field, obj)
		case "linkedDatasetId":
			out.Values[i] = ec._MergedPropertyGroup_linkedDatasetId(ctx, field, obj)
		case "fields":
			out.Values[i] = ec._MergedPropertyGroup_fields(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "groups":
			out.Values[i] = ec._MergedPropertyGroup_groups(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				atomic.AddUint32(&invalids, 1)
			}
		case "originalProperty":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedPropertyGroup_originalProperty(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "parentProperty":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedPropertyGroup_parentProperty(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "original":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedPropertyGroup_original(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "parent":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedPropertyGroup_parent(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "schema":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedPropertyGroup_schema(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		case "linkedDataset":
			field := field // shadow loop variable for the closure
			innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
				defer func() {
					if r := recover(); r != nil {
						ec.Error(ctx, ec.Recover(ctx, r))
					}
				}()
				res = ec._MergedPropertyGroup_linkedDataset(ctx, field, obj)
				return res
			}
			out.Concurrently(i, func() graphql.Marshaler {
				return innerFunc(ctx)
			})
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	if invalids > 0 {
		return graphql.Null
	}
	return out
}

var moveInfoboxFieldPayloadImplementors = []string{"MoveInfoboxFieldPayload"}

// _MoveInfoboxFieldPayload marshals a mutation payload. All fields resolve
// inline, so no goroutines are spawned and the plain (non-atomic) invalids++
// is safe here — this asymmetry with the atomic counters above is the
// generator's intent, not an inconsistency.
func (ec *executionContext) _MoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MoveInfoboxFieldPayload) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, moveInfoboxFieldPayloadImplementors)
	out := graphql.NewFieldSet(fields)
	var invalids uint32
	for i, field := range fields {
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("MoveInfoboxFieldPayload")
		case "infoboxFieldId":
			out.Values[i] = ec._MoveInfoboxFieldPayload_infoboxFieldId(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				invalids++
			}
		case "layer":
			out.Values[i] = ec._MoveInfoboxFieldPayload_layer(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				invalids++
			}
		case "index":
			out.Values[i] = ec._MoveInfoboxFieldPayload_index(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				invalids++
			}
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	if invalids > 0 {
		return graphql.Null
	}
	return out
}

var moveLayerPayloadImplementors = []string{"MoveLayerPayload"}

// _MoveLayerPayload marshals a moveLayer mutation payload; all fields are
// required and resolved inline (no concurrency, hence non-atomic invalids++).
func (ec *executionContext) _MoveLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MoveLayerPayload) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, moveLayerPayloadImplementors)
	out := graphql.NewFieldSet(fields)
	var invalids uint32
	for i, field := range fields {
		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("MoveLayerPayload")
		case "layerId":
			out.Values[i] = ec._MoveLayerPayload_layerId(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				invalids++
			}
		case "fromParentLayer":
			out.Values[i] = ec._MoveLayerPayload_fromParentLayer(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				invalids++
			}
		case "toParentLayer":
			out.Values[i] = ec._MoveLayerPayload_toParentLayer(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				invalids++
			}
		case "index":
			out.Values[i] = ec._MoveLayerPayload_index(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				invalids++
			}
		default:
			panic("unknown field " + strconv.Quote(field.Name))
		}
	}
	out.Dispatch()
	if invalids > 0 {
		return graphql.Null
	}
	return out
}

var mutationImplementors = []string{"Mutation"}

// _Mutation dispatches the root Mutation selection set. Each field runs
// through RootResolverMiddleware with its own root-field context.
// (Definition continues beyond this view.)
func (ec *executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler {
	fields := graphql.CollectFields(ec.OperationContext, sel, mutationImplementors)
	ctx = graphql.WithFieldContext(ctx, &graphql.FieldContext{
		Object: "Mutation",
	})

	out := graphql.NewFieldSet(fields)
	var invalids uint32
	for i, field := range fields {
		innerCtx := graphql.WithRootFieldContext(ctx, &graphql.RootFieldContext{
			Object: field.Name,
			Field:  field,
		})

		switch field.Name {
		case "__typename":
			out.Values[i] = graphql.MarshalString("Mutation")
		case "createAsset":
			out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) {
				return
ec._Mutation_createAsset(ctx, field) + }) + + case "removeAsset": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeAsset(ctx, field) + }) + + case "signup": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_signup(ctx, field) + }) + + case "updateMe": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateMe(ctx, field) + }) + + case "removeMyAuth": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeMyAuth(ctx, field) + }) + + case "deleteMe": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_deleteMe(ctx, field) + }) + + case "createTeam": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createTeam(ctx, field) + }) + + case "deleteTeam": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_deleteTeam(ctx, field) + }) + + case "updateTeam": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateTeam(ctx, field) + }) + + case "addMemberToTeam": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addMemberToTeam(ctx, field) + }) + + case "removeMemberFromTeam": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return 
ec._Mutation_removeMemberFromTeam(ctx, field) + }) + + case "updateMemberOfTeam": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateMemberOfTeam(ctx, field) + }) + + case "createProject": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createProject(ctx, field) + }) + + case "updateProject": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateProject(ctx, field) + }) + + case "publishProject": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_publishProject(ctx, field) + }) + + case "deleteProject": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_deleteProject(ctx, field) + }) + + case "createScene": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createScene(ctx, field) + }) + + case "addWidget": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addWidget(ctx, field) + }) + + case "updateWidget": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateWidget(ctx, field) + }) + + case "updateWidgetAlignSystem": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateWidgetAlignSystem(ctx, field) + }) + + case "removeWidget": + + out.Values[i] = 
ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeWidget(ctx, field) + }) + + case "installPlugin": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_installPlugin(ctx, field) + }) + + case "uninstallPlugin": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_uninstallPlugin(ctx, field) + }) + + case "uploadPlugin": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_uploadPlugin(ctx, field) + }) + + case "upgradePlugin": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_upgradePlugin(ctx, field) + }) + + case "addCluster": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addCluster(ctx, field) + }) + + case "updateCluster": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateCluster(ctx, field) + }) + + case "removeCluster": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeCluster(ctx, field) + }) + + case "updateDatasetSchema": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateDatasetSchema(ctx, field) + }) + + case "syncDataset": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_syncDataset(ctx, field) + }) + 
+ case "addDynamicDatasetSchema": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addDynamicDatasetSchema(ctx, field) + }) + + case "addDynamicDataset": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addDynamicDataset(ctx, field) + }) + + case "removeDatasetSchema": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeDatasetSchema(ctx, field) + }) + + case "importDataset": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_importDataset(ctx, field) + }) + + case "importDatasetFromGoogleSheet": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_importDatasetFromGoogleSheet(ctx, field) + }) + + case "addDatasetSchema": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addDatasetSchema(ctx, field) + }) + + case "updatePropertyValue": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updatePropertyValue(ctx, field) + }) + + case "removePropertyField": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removePropertyField(ctx, field) + }) + + case "uploadFileToProperty": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_uploadFileToProperty(ctx, field) + }) + + case "linkDatasetToPropertyValue": + + 
out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_linkDatasetToPropertyValue(ctx, field) + }) + + case "unlinkPropertyValue": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_unlinkPropertyValue(ctx, field) + }) + + case "addPropertyItem": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addPropertyItem(ctx, field) + }) + + case "movePropertyItem": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_movePropertyItem(ctx, field) + }) + + case "removePropertyItem": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removePropertyItem(ctx, field) + }) + + case "updatePropertyItems": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updatePropertyItems(ctx, field) + }) + + case "addLayerItem": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addLayerItem(ctx, field) + }) + + case "addLayerGroup": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addLayerGroup(ctx, field) + }) + + case "removeLayer": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeLayer(ctx, field) + }) + + case "updateLayer": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res 
graphql.Marshaler) { + return ec._Mutation_updateLayer(ctx, field) + }) + + case "moveLayer": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_moveLayer(ctx, field) + }) + + case "createInfobox": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createInfobox(ctx, field) + }) + + case "removeInfobox": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeInfobox(ctx, field) + }) + + case "addInfoboxField": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addInfoboxField(ctx, field) + }) + + case "moveInfoboxField": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_moveInfoboxField(ctx, field) + }) + + case "removeInfoboxField": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeInfoboxField(ctx, field) + }) + + case "importLayer": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_importLayer(ctx, field) + }) + + case "attachTagToLayer": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_attachTagToLayer(ctx, field) + }) + + case "detachTagFromLayer": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_detachTagFromLayer(ctx, field) + }) + + case "createTagItem": + + out.Values[i] = 
ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createTagItem(ctx, field) + }) + + case "createTagGroup": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createTagGroup(ctx, field) + }) + + case "attachTagItemToGroup": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_attachTagItemToGroup(ctx, field) + }) + + case "detachTagItemFromGroup": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_detachTagItemFromGroup(ctx, field) + }) + + case "updateTag": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateTag(ctx, field) + }) + + case "removeTag": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeTag(ctx, field) + }) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var pageInfoImplementors = []string{"PageInfo"} + +func (ec *executionContext) _PageInfo(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PageInfo) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, pageInfoImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PageInfo") + case "startCursor": + + out.Values[i] = ec._PageInfo_startCursor(ctx, field, obj) + + case "endCursor": + + out.Values[i] = ec._PageInfo_endCursor(ctx, field, obj) + + case "hasNextPage": + + 
out.Values[i] = ec._PageInfo_hasNextPage(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "hasPreviousPage": + + out.Values[i] = ec._PageInfo_hasPreviousPage(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var pluginImplementors = []string{"Plugin"} + +func (ec *executionContext) _Plugin(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Plugin) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, pluginImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Plugin") + case "id": + + out.Values[i] = ec._Plugin_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "sceneId": + + out.Values[i] = ec._Plugin_sceneId(ctx, field, obj) + + case "name": + + out.Values[i] = ec._Plugin_name(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "version": + + out.Values[i] = ec._Plugin_version(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "description": + + out.Values[i] = ec._Plugin_description(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "author": + + out.Values[i] = ec._Plugin_author(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "repositoryUrl": + + out.Values[i] = ec._Plugin_repositoryUrl(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertySchemaId": + + out.Values[i] = ec._Plugin_propertySchemaId(ctx, field, obj) + + case "extensions": + + out.Values[i] = ec._Plugin_extensions(ctx, field, 
obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "scenePlugin": + + out.Values[i] = ec._Plugin_scenePlugin(ctx, field, obj) + + case "allTranslatedDescription": + + out.Values[i] = ec._Plugin_allTranslatedDescription(ctx, field, obj) + + case "allTranslatedName": + + out.Values[i] = ec._Plugin_allTranslatedName(ctx, field, obj) + + case "scene": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Plugin_scene(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "translatedName": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Plugin_translatedName(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "translatedDescription": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Plugin_translatedDescription(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "propertySchema": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Plugin_propertySchema(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + 
out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var pluginExtensionImplementors = []string{"PluginExtension"} + +func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PluginExtension) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, pluginExtensionImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PluginExtension") + case "extensionId": + + out.Values[i] = ec._PluginExtension_extensionId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "pluginId": + + out.Values[i] = ec._PluginExtension_pluginId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "type": + + out.Values[i] = ec._PluginExtension_type(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + + out.Values[i] = ec._PluginExtension_name(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "description": + + out.Values[i] = ec._PluginExtension_description(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "icon": + + out.Values[i] = ec._PluginExtension_icon(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "singleOnly": + + out.Values[i] = ec._PluginExtension_singleOnly(ctx, field, obj) + + case "widgetLayout": + + out.Values[i] = ec._PluginExtension_widgetLayout(ctx, field, obj) + + case "visualizer": + + out.Values[i] = ec._PluginExtension_visualizer(ctx, field, obj) + + case "propertySchemaId": + + out.Values[i] = ec._PluginExtension_propertySchemaId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case 
"allTranslatedName": + + out.Values[i] = ec._PluginExtension_allTranslatedName(ctx, field, obj) + + case "allTranslatedDescription": + + out.Values[i] = ec._PluginExtension_allTranslatedDescription(ctx, field, obj) + + case "plugin": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PluginExtension_plugin(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "sceneWidget": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PluginExtension_sceneWidget(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "propertySchema": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PluginExtension_propertySchema(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "translatedName": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PluginExtension_translatedName(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "translatedDescription": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PluginExtension_translatedDescription(ctx, field, obj) 
+ if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var projectImplementors = []string{"Project", "Node"} + +func (ec *executionContext) _Project(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Project) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, projectImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Project") + case "id": + + out.Values[i] = ec._Project_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "isArchived": + + out.Values[i] = ec._Project_isArchived(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "isBasicAuthActive": + + out.Values[i] = ec._Project_isBasicAuthActive(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "basicAuthUsername": + + out.Values[i] = ec._Project_basicAuthUsername(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "basicAuthPassword": + + out.Values[i] = ec._Project_basicAuthPassword(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "createdAt": + + out.Values[i] = ec._Project_createdAt(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "updatedAt": + + out.Values[i] = ec._Project_updatedAt(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "publishedAt": + + out.Values[i] = ec._Project_publishedAt(ctx, field, obj) + + case "name": + + 
out.Values[i] = ec._Project_name(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "description": + + out.Values[i] = ec._Project_description(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "alias": + + out.Values[i] = ec._Project_alias(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "publicTitle": + + out.Values[i] = ec._Project_publicTitle(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "publicDescription": + + out.Values[i] = ec._Project_publicDescription(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "publicImage": + + out.Values[i] = ec._Project_publicImage(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "publicNoIndex": + + out.Values[i] = ec._Project_publicNoIndex(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "imageUrl": + + out.Values[i] = ec._Project_imageUrl(ctx, field, obj) + + case "teamId": + + out.Values[i] = ec._Project_teamId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "visualizer": + + out.Values[i] = ec._Project_visualizer(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "publishmentStatus": + + out.Values[i] = ec._Project_publishmentStatus(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "team": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Project_team(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "scene": + 
field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Project_scene(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var projectAliasAvailabilityImplementors = []string{"ProjectAliasAvailability"} + +func (ec *executionContext) _ProjectAliasAvailability(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ProjectAliasAvailability) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, projectAliasAvailabilityImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("ProjectAliasAvailability") + case "alias": + + out.Values[i] = ec._ProjectAliasAvailability_alias(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "available": + + out.Values[i] = ec._ProjectAliasAvailability_available(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var projectConnectionImplementors = []string{"ProjectConnection"} + +func (ec *executionContext) _ProjectConnection(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ProjectConnection) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, projectConnectionImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("ProjectConnection") + case "edges": + + 
out.Values[i] = ec._ProjectConnection_edges(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "nodes": + + out.Values[i] = ec._ProjectConnection_nodes(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "pageInfo": + + out.Values[i] = ec._ProjectConnection_pageInfo(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "totalCount": + + out.Values[i] = ec._ProjectConnection_totalCount(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var projectEdgeImplementors = []string{"ProjectEdge"} + +func (ec *executionContext) _ProjectEdge(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ProjectEdge) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, projectEdgeImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("ProjectEdge") + case "cursor": + + out.Values[i] = ec._ProjectEdge_cursor(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "node": + + out.Values[i] = ec._ProjectEdge_node(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var projectPayloadImplementors = []string{"ProjectPayload"} + +func (ec *executionContext) _ProjectPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ProjectPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, projectPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = 
graphql.MarshalString("ProjectPayload") + case "project": + + out.Values[i] = ec._ProjectPayload_project(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyImplementors = []string{"Property", "Node"} + +func (ec *executionContext) _Property(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Property) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Property") + case "id": + + out.Values[i] = ec._Property_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + + out.Values[i] = ec._Property_schemaId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "items": + + out.Values[i] = ec._Property_items(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schema": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Property_schema(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "layer": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Property_layer(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "merged": + field := field + + innerFunc := func(ctx context.Context) (res 
graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Property_merged(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyConditionImplementors = []string{"PropertyCondition"} + +func (ec *executionContext) _PropertyCondition(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyCondition) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyConditionImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertyCondition") + case "fieldId": + + out.Values[i] = ec._PropertyCondition_fieldId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "type": + + out.Values[i] = ec._PropertyCondition_type(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "value": + + out.Values[i] = ec._PropertyCondition_value(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyFieldImplementors = []string{"PropertyField"} + +func (ec *executionContext) _PropertyField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyField) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyFieldImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertyField") + case "id": + + out.Values[i] = ec._PropertyField_id(ctx, field, obj) + + if out.Values[i] == 
graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "parentId": + + out.Values[i] = ec._PropertyField_parentId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + + out.Values[i] = ec._PropertyField_schemaId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "fieldId": + + out.Values[i] = ec._PropertyField_fieldId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "links": + + out.Values[i] = ec._PropertyField_links(ctx, field, obj) + + case "type": + + out.Values[i] = ec._PropertyField_type(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "value": + + out.Values[i] = ec._PropertyField_value(ctx, field, obj) + + case "parent": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyField_parent(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "schema": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyField_schema(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "field": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyField_field(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "actualValue": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { 
+ if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyField_actualValue(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyFieldLinkImplementors = []string{"PropertyFieldLink"} + +func (ec *executionContext) _PropertyFieldLink(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyFieldLink) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyFieldLinkImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertyFieldLink") + case "datasetId": + + out.Values[i] = ec._PropertyFieldLink_datasetId(ctx, field, obj) + + case "datasetSchemaId": + + out.Values[i] = ec._PropertyFieldLink_datasetSchemaId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "datasetSchemaFieldId": + + out.Values[i] = ec._PropertyFieldLink_datasetSchemaFieldId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "dataset": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyFieldLink_dataset(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "datasetField": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyFieldLink_datasetField(ctx, field, obj) + return res + } + + out.Concurrently(i, 
func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "datasetSchema": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyFieldLink_datasetSchema(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "datasetSchemaField": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyFieldLink_datasetSchemaField(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyFieldPayloadImplementors = []string{"PropertyFieldPayload"} + +func (ec *executionContext) _PropertyFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyFieldPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyFieldPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertyFieldPayload") + case "property": + + out.Values[i] = ec._PropertyFieldPayload_property(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "propertyField": + + out.Values[i] = ec._PropertyFieldPayload_propertyField(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyGroupImplementors = []string{"PropertyGroup", "PropertyItem"} + +func (ec *executionContext) _PropertyGroup(ctx 
context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyGroup) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyGroupImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertyGroup") + case "id": + + out.Values[i] = ec._PropertyGroup_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + + out.Values[i] = ec._PropertyGroup_schemaId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaGroupId": + + out.Values[i] = ec._PropertyGroup_schemaGroupId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "fields": + + out.Values[i] = ec._PropertyGroup_fields(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schema": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyGroup_schema(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "schemaGroup": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyGroup_schemaGroup(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyGroupListImplementors = []string{"PropertyGroupList", "PropertyItem"} + +func (ec *executionContext) _PropertyGroupList(ctx 
context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyGroupList) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyGroupListImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertyGroupList") + case "id": + + out.Values[i] = ec._PropertyGroupList_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + + out.Values[i] = ec._PropertyGroupList_schemaId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaGroupId": + + out.Values[i] = ec._PropertyGroupList_schemaGroupId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "groups": + + out.Values[i] = ec._PropertyGroupList_groups(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schema": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyGroupList_schema(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "schemaGroup": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyGroupList_schemaGroup(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyItemPayloadImplementors = []string{"PropertyItemPayload"} + +func (ec *executionContext) 
_PropertyItemPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyItemPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyItemPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertyItemPayload") + case "property": + + out.Values[i] = ec._PropertyItemPayload_property(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "propertyItem": + + out.Values[i] = ec._PropertyItemPayload_propertyItem(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyLinkableFieldsImplementors = []string{"PropertyLinkableFields"} + +func (ec *executionContext) _PropertyLinkableFields(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyLinkableFields) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyLinkableFieldsImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertyLinkableFields") + case "schemaId": + + out.Values[i] = ec._PropertyLinkableFields_schemaId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "latlng": + + out.Values[i] = ec._PropertyLinkableFields_latlng(ctx, field, obj) + + case "url": + + out.Values[i] = ec._PropertyLinkableFields_url(ctx, field, obj) + + case "latlngField": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyLinkableFields_latlngField(ctx, field, obj) + return res + } + + out.Concurrently(i, func() 
graphql.Marshaler { + return innerFunc(ctx) + + }) + case "urlField": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyLinkableFields_urlField(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "schema": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyLinkableFields_schema(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertySchemaImplementors = []string{"PropertySchema"} + +func (ec *executionContext) _PropertySchema(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertySchema) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertySchemaImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertySchema") + case "id": + + out.Values[i] = ec._PropertySchema_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "groups": + + out.Values[i] = ec._PropertySchema_groups(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "linkableFields": + + out.Values[i] = ec._PropertySchema_linkableFields(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var 
propertySchemaFieldImplementors = []string{"PropertySchemaField"} + +func (ec *executionContext) _PropertySchemaField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertySchemaField) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertySchemaFieldImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertySchemaField") + case "fieldId": + + out.Values[i] = ec._PropertySchemaField_fieldId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "type": + + out.Values[i] = ec._PropertySchemaField_type(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "title": + + out.Values[i] = ec._PropertySchemaField_title(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "description": + + out.Values[i] = ec._PropertySchemaField_description(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "prefix": + + out.Values[i] = ec._PropertySchemaField_prefix(ctx, field, obj) + + case "suffix": + + out.Values[i] = ec._PropertySchemaField_suffix(ctx, field, obj) + + case "defaultValue": + + out.Values[i] = ec._PropertySchemaField_defaultValue(ctx, field, obj) + + case "ui": + + out.Values[i] = ec._PropertySchemaField_ui(ctx, field, obj) + + case "min": + + out.Values[i] = ec._PropertySchemaField_min(ctx, field, obj) + + case "max": + + out.Values[i] = ec._PropertySchemaField_max(ctx, field, obj) + + case "choices": + + out.Values[i] = ec._PropertySchemaField_choices(ctx, field, obj) + + case "isAvailableIf": + + out.Values[i] = ec._PropertySchemaField_isAvailableIf(ctx, field, obj) + + case "allTranslatedTitle": + + out.Values[i] = ec._PropertySchemaField_allTranslatedTitle(ctx, field, obj) + + case 
"allTranslatedDescription": + + out.Values[i] = ec._PropertySchemaField_allTranslatedDescription(ctx, field, obj) + + case "translatedTitle": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertySchemaField_translatedTitle(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "translatedDescription": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertySchemaField_translatedDescription(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertySchemaFieldChoiceImplementors = []string{"PropertySchemaFieldChoice"} + +func (ec *executionContext) _PropertySchemaFieldChoice(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertySchemaFieldChoice) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertySchemaFieldChoiceImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertySchemaFieldChoice") + case "key": + + out.Values[i] = ec._PropertySchemaFieldChoice_key(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "title": + + out.Values[i] = ec._PropertySchemaFieldChoice_title(ctx, field, obj) + + if out.Values[i] == graphql.Null { + 
atomic.AddUint32(&invalids, 1) + } + case "icon": + + out.Values[i] = ec._PropertySchemaFieldChoice_icon(ctx, field, obj) + + case "allTranslatedTitle": + + out.Values[i] = ec._PropertySchemaFieldChoice_allTranslatedTitle(ctx, field, obj) + + case "translatedTitle": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertySchemaFieldChoice_translatedTitle(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertySchemaGroupImplementors = []string{"PropertySchemaGroup"} + +func (ec *executionContext) _PropertySchemaGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertySchemaGroup) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertySchemaGroupImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertySchemaGroup") + case "schemaGroupId": + + out.Values[i] = ec._PropertySchemaGroup_schemaGroupId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + + out.Values[i] = ec._PropertySchemaGroup_schemaId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "fields": + + out.Values[i] = ec._PropertySchemaGroup_fields(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "isList": + + out.Values[i] = ec._PropertySchemaGroup_isList(ctx, field, obj) + + if out.Values[i] == graphql.Null { + 
atomic.AddUint32(&invalids, 1) + } + case "isAvailableIf": + + out.Values[i] = ec._PropertySchemaGroup_isAvailableIf(ctx, field, obj) + + case "title": + + out.Values[i] = ec._PropertySchemaGroup_title(ctx, field, obj) + + case "allTranslatedTitle": + + out.Values[i] = ec._PropertySchemaGroup_allTranslatedTitle(ctx, field, obj) + + case "representativeFieldId": + + out.Values[i] = ec._PropertySchemaGroup_representativeFieldId(ctx, field, obj) + + case "representativeField": + + out.Values[i] = ec._PropertySchemaGroup_representativeField(ctx, field, obj) + + case "schema": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertySchemaGroup_schema(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "translatedTitle": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertySchemaGroup_translatedTitle(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var queryImplementors = []string{"Query"} + +func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, queryImplementors) + ctx = graphql.WithFieldContext(ctx, &graphql.FieldContext{ + Object: "Query", + }) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + innerCtx := graphql.WithRootFieldContext(ctx, &graphql.RootFieldContext{ + Object: 
field.Name, + Field: field, + }) + + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Query") + case "me": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_me(ctx, field) + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) + }) + case "node": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_node(ctx, field) + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) + }) + case "nodes": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_nodes(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) + }) + case "propertySchema": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_propertySchema(ctx, field) + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() 
graphql.Marshaler { + return rrm(innerCtx) + }) + case "propertySchemas": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_propertySchemas(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) + }) + case "plugin": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_plugin(ctx, field) + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) + }) + case "plugins": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_plugins(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) + }) + case "layer": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_layer(ctx, field) + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() 
graphql.Marshaler { + return rrm(innerCtx) + }) + case "scene": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_scene(ctx, field) + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) + }) + case "assets": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_assets(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) + }) + case "projects": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_projects(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) + }) + case "datasetSchemas": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_datasetSchemas(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return 
ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) + }) + case "datasets": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_datasets(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) + }) + case "dynamicDatasetSchemas": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_dynamicDatasetSchemas(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) + }) + case "searchUser": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_searchUser(ctx, field) + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) + }) + case "checkProjectAlias": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_checkProjectAlias(ctx, field) + if res == graphql.Null { + 
atomic.AddUint32(&invalids, 1) + } + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) + }) + case "__type": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Query___type(ctx, field) + }) + + case "__schema": + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Query___schema(ctx, field) + }) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var rectImplementors = []string{"Rect"} + +func (ec *executionContext) _Rect(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Rect) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, rectImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Rect") + case "west": + + out.Values[i] = ec._Rect_west(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "south": + + out.Values[i] = ec._Rect_south(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "east": + + out.Values[i] = ec._Rect_east(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "north": + + out.Values[i] = ec._Rect_north(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var removeAssetPayloadImplementors = []string{"RemoveAssetPayload"} + +func (ec *executionContext) _RemoveAssetPayload(ctx 
context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveAssetPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeAssetPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("RemoveAssetPayload") + case "assetId": + + out.Values[i] = ec._RemoveAssetPayload_assetId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var removeClusterPayloadImplementors = []string{"RemoveClusterPayload"} + +func (ec *executionContext) _RemoveClusterPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveClusterPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeClusterPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("RemoveClusterPayload") + case "scene": + + out.Values[i] = ec._RemoveClusterPayload_scene(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "clusterId": + + out.Values[i] = ec._RemoveClusterPayload_clusterId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var removeDatasetSchemaPayloadImplementors = []string{"RemoveDatasetSchemaPayload"} + +func (ec *executionContext) _RemoveDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveDatasetSchemaPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeDatasetSchemaPayloadImplementors) + out := 
graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("RemoveDatasetSchemaPayload") + case "schemaId": + + out.Values[i] = ec._RemoveDatasetSchemaPayload_schemaId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var removeInfoboxFieldPayloadImplementors = []string{"RemoveInfoboxFieldPayload"} + +func (ec *executionContext) _RemoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveInfoboxFieldPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeInfoboxFieldPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("RemoveInfoboxFieldPayload") + case "infoboxFieldId": + + out.Values[i] = ec._RemoveInfoboxFieldPayload_infoboxFieldId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "layer": + + out.Values[i] = ec._RemoveInfoboxFieldPayload_layer(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var removeInfoboxPayloadImplementors = []string{"RemoveInfoboxPayload"} + +func (ec *executionContext) _RemoveInfoboxPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveInfoboxPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeInfoboxPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = 
graphql.MarshalString("RemoveInfoboxPayload") + case "layer": + + out.Values[i] = ec._RemoveInfoboxPayload_layer(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var removeLayerPayloadImplementors = []string{"RemoveLayerPayload"} + +func (ec *executionContext) _RemoveLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveLayerPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeLayerPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("RemoveLayerPayload") + case "layerId": + + out.Values[i] = ec._RemoveLayerPayload_layerId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "parentLayer": + + out.Values[i] = ec._RemoveLayerPayload_parentLayer(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var removeMemberFromTeamPayloadImplementors = []string{"RemoveMemberFromTeamPayload"} + +func (ec *executionContext) _RemoveMemberFromTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveMemberFromTeamPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeMemberFromTeamPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("RemoveMemberFromTeamPayload") + case "team": + + out.Values[i] = ec._RemoveMemberFromTeamPayload_team(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + 
default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var removeTagPayloadImplementors = []string{"RemoveTagPayload"} + +func (ec *executionContext) _RemoveTagPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveTagPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeTagPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("RemoveTagPayload") + case "tagId": + + out.Values[i] = ec._RemoveTagPayload_tagId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "updatedLayers": + + out.Values[i] = ec._RemoveTagPayload_updatedLayers(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var removeWidgetPayloadImplementors = []string{"RemoveWidgetPayload"} + +func (ec *executionContext) _RemoveWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveWidgetPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeWidgetPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("RemoveWidgetPayload") + case "scene": + + out.Values[i] = ec._RemoveWidgetPayload_scene(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "widgetId": + + out.Values[i] = ec._RemoveWidgetPayload_widgetId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + 
return graphql.Null + } + return out +} + +var sceneImplementors = []string{"Scene", "Node"} + +func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Scene) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, sceneImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Scene") + case "id": + + out.Values[i] = ec._Scene_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "projectId": + + out.Values[i] = ec._Scene_projectId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "teamId": + + out.Values[i] = ec._Scene_teamId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertyId": + + out.Values[i] = ec._Scene_propertyId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "createdAt": + + out.Values[i] = ec._Scene_createdAt(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "updatedAt": + + out.Values[i] = ec._Scene_updatedAt(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "rootLayerId": + + out.Values[i] = ec._Scene_rootLayerId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "widgets": + + out.Values[i] = ec._Scene_widgets(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "plugins": + + out.Values[i] = ec._Scene_plugins(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "widgetAlignSystem": + + out.Values[i] = ec._Scene_widgetAlignSystem(ctx, field, obj) + + case "dynamicDatasetSchemas": + + out.Values[i] = 
ec._Scene_dynamicDatasetSchemas(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "project": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Scene_project(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "team": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Scene_team(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "property": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Scene_property(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "rootLayer": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Scene_rootLayer(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "datasetSchemas": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Scene_datasetSchemas(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "tagIds": + + out.Values[i] = ec._Scene_tagIds(ctx, field, obj) + + if out.Values[i] 
== graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "tags": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Scene_tags(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "clusters": + + out.Values[i] = ec._Scene_clusters(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var scenePluginImplementors = []string{"ScenePlugin"} + +func (ec *executionContext) _ScenePlugin(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ScenePlugin) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, scenePluginImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("ScenePlugin") + case "pluginId": + + out.Values[i] = ec._ScenePlugin_pluginId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertyId": + + out.Values[i] = ec._ScenePlugin_propertyId(ctx, field, obj) + + case "plugin": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._ScenePlugin_plugin(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "property": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, 
r)) + } + }() + res = ec._ScenePlugin_property(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var sceneWidgetImplementors = []string{"SceneWidget"} + +func (ec *executionContext) _SceneWidget(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.SceneWidget) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, sceneWidgetImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("SceneWidget") + case "id": + + out.Values[i] = ec._SceneWidget_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "pluginId": + + out.Values[i] = ec._SceneWidget_pluginId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "extensionId": + + out.Values[i] = ec._SceneWidget_extensionId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertyId": + + out.Values[i] = ec._SceneWidget_propertyId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "enabled": + + out.Values[i] = ec._SceneWidget_enabled(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "extended": + + out.Values[i] = ec._SceneWidget_extended(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "plugin": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._SceneWidget_plugin(ctx, field, obj) + return res + } + + 
out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "extension": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._SceneWidget_extension(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "property": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._SceneWidget_property(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var signupPayloadImplementors = []string{"SignupPayload"} + +func (ec *executionContext) _SignupPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.SignupPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, signupPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("SignupPayload") + case "user": + + out.Values[i] = ec._SignupPayload_user(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "team": + + out.Values[i] = ec._SignupPayload_team(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var syncDatasetPayloadImplementors = []string{"SyncDatasetPayload"} + +func (ec *executionContext) _SyncDatasetPayload(ctx context.Context, sel ast.SelectionSet, obj 
*gqlmodel.SyncDatasetPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, syncDatasetPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("SyncDatasetPayload") + case "sceneId": + + out.Values[i] = ec._SyncDatasetPayload_sceneId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "url": + + out.Values[i] = ec._SyncDatasetPayload_url(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "datasetSchema": + + out.Values[i] = ec._SyncDatasetPayload_datasetSchema(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "dataset": + + out.Values[i] = ec._SyncDatasetPayload_dataset(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var tagGroupImplementors = []string{"TagGroup", "Tag"} + +func (ec *executionContext) _TagGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.TagGroup) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, tagGroupImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("TagGroup") + case "id": + + out.Values[i] = ec._TagGroup_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "sceneId": + + out.Values[i] = ec._TagGroup_sceneId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "label": + + out.Values[i] = ec._TagGroup_label(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "tagIds": + + out.Values[i] = 
ec._TagGroup_tagIds(ctx, field, obj) + + case "tags": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TagGroup_tags(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "scene": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TagGroup_scene(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "layers": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TagGroup_layers(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var tagItemImplementors = []string{"TagItem", "Tag"} + +func (ec *executionContext) _TagItem(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.TagItem) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, tagItemImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("TagItem") + case "id": + + out.Values[i] = ec._TagItem_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "sceneId": + + out.Values[i] = ec._TagItem_sceneId(ctx, field, 
obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "label": + + out.Values[i] = ec._TagItem_label(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "parentId": + + out.Values[i] = ec._TagItem_parentId(ctx, field, obj) + + case "linkedDatasetID": + + out.Values[i] = ec._TagItem_linkedDatasetID(ctx, field, obj) + + case "linkedDatasetSchemaID": + + out.Values[i] = ec._TagItem_linkedDatasetSchemaID(ctx, field, obj) + + case "linkedDatasetFieldID": + + out.Values[i] = ec._TagItem_linkedDatasetFieldID(ctx, field, obj) + + case "linkedDatasetSchema": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TagItem_linkedDatasetSchema(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "linkedDataset": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TagItem_linkedDataset(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "linkedDatasetField": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TagItem_linkedDatasetField(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "parent": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TagItem_parent(ctx, field, obj) + return res + } + + out.Concurrently(i, func() 
graphql.Marshaler { + return innerFunc(ctx) + + }) + case "layers": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TagItem_layers(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var teamImplementors = []string{"Team", "Node"} + +func (ec *executionContext) _Team(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Team) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, teamImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Team") + case "id": + + out.Values[i] = ec._Team_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + + out.Values[i] = ec._Team_name(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "members": + + out.Values[i] = ec._Team_members(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "personal": + + out.Values[i] = ec._Team_personal(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "assets": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Team_assets(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return 
innerFunc(ctx) + + }) + case "projects": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Team_projects(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var teamMemberImplementors = []string{"TeamMember"} + +func (ec *executionContext) _TeamMember(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.TeamMember) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, teamMemberImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("TeamMember") + case "userId": + + out.Values[i] = ec._TeamMember_userId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "role": + + out.Values[i] = ec._TeamMember_role(ctx, field, obj) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "user": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TeamMember_user(ctx, field, obj) + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var typographyImplementors = []string{"Typography"} + +func (ec *executionContext) _Typography(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Typography) 
graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, typographyImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Typography") + case "fontFamily": + + out.Values[i] = ec._Typography_fontFamily(ctx, field, obj) + + case "fontWeight": + + out.Values[i] = ec._Typography_fontWeight(ctx, field, obj) + + case "fontSize": + + out.Values[i] = ec._Typography_fontSize(ctx, field, obj) + + case "color": + + out.Values[i] = ec._Typography_color(ctx, field, obj) + + case "textAlign": + + out.Values[i] = ec._Typography_textAlign(ctx, field, obj) + + case "bold": + + out.Values[i] = ec._Typography_bold(ctx, field, obj) + + case "italic": + + out.Values[i] = ec._Typography_italic(ctx, field, obj) + + case "underline": + + out.Values[i] = ec._Typography_underline(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var uninstallPluginPayloadImplementors = []string{"UninstallPluginPayload"} + +func (ec *executionContext) _UninstallPluginPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UninstallPluginPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, uninstallPluginPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UninstallPluginPayload") + case "pluginId": + + out.Values[i] = ec._UninstallPluginPayload_pluginId(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "scene": + + out.Values[i] = ec._UninstallPluginPayload_scene(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + 
out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var updateClusterPayloadImplementors = []string{"UpdateClusterPayload"} + +func (ec *executionContext) _UpdateClusterPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateClusterPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateClusterPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateClusterPayload") + case "scene": + + out.Values[i] = ec._UpdateClusterPayload_scene(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "cluster": + + out.Values[i] = ec._UpdateClusterPayload_cluster(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var updateDatasetSchemaPayloadImplementors = []string{"UpdateDatasetSchemaPayload"} + +func (ec *executionContext) _UpdateDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateDatasetSchemaPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateDatasetSchemaPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateDatasetSchemaPayload") + case "datasetSchema": + + out.Values[i] = ec._UpdateDatasetSchemaPayload_datasetSchema(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var updateLayerPayloadImplementors = []string{"UpdateLayerPayload"} + +func (ec *executionContext) _UpdateLayerPayload(ctx context.Context, sel 
ast.SelectionSet, obj *gqlmodel.UpdateLayerPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateLayerPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateLayerPayload") + case "layer": + + out.Values[i] = ec._UpdateLayerPayload_layer(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var updateMePayloadImplementors = []string{"UpdateMePayload"} + +func (ec *executionContext) _UpdateMePayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateMePayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateMePayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateMePayload") + case "me": + + out.Values[i] = ec._UpdateMePayload_me(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var updateMemberOfTeamPayloadImplementors = []string{"UpdateMemberOfTeamPayload"} + +func (ec *executionContext) _UpdateMemberOfTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateMemberOfTeamPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateMemberOfTeamPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateMemberOfTeamPayload") + case "team": + + 
out.Values[i] = ec._UpdateMemberOfTeamPayload_team(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var updateTagPayloadImplementors = []string{"UpdateTagPayload"} + +func (ec *executionContext) _UpdateTagPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateTagPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateTagPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateTagPayload") + case "tag": + + out.Values[i] = ec._UpdateTagPayload_tag(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var updateTeamPayloadImplementors = []string{"UpdateTeamPayload"} + +func (ec *executionContext) _UpdateTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateTeamPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateTeamPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateTeamPayload") + case "team": + + out.Values[i] = ec._UpdateTeamPayload_team(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var updateWidgetAlignSystemPayloadImplementors = []string{"UpdateWidgetAlignSystemPayload"} + +func (ec *executionContext) 
_UpdateWidgetAlignSystemPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateWidgetAlignSystemPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateWidgetAlignSystemPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateWidgetAlignSystemPayload") + case "scene": + + out.Values[i] = ec._UpdateWidgetAlignSystemPayload_scene(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var updateWidgetPayloadImplementors = []string{"UpdateWidgetPayload"} + +func (ec *executionContext) _UpdateWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateWidgetPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateWidgetPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateWidgetPayload") + case "scene": + + out.Values[i] = ec._UpdateWidgetPayload_scene(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "sceneWidget": + + out.Values[i] = ec._UpdateWidgetPayload_sceneWidget(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var upgradePluginPayloadImplementors = []string{"UpgradePluginPayload"} + +func (ec *executionContext) _UpgradePluginPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpgradePluginPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, 
upgradePluginPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpgradePluginPayload") + case "scene": + + out.Values[i] = ec._UpgradePluginPayload_scene(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "scenePlugin": + + out.Values[i] = ec._UpgradePluginPayload_scenePlugin(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var uploadPluginPayloadImplementors = []string{"UploadPluginPayload"} + +func (ec *executionContext) _UploadPluginPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UploadPluginPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, uploadPluginPayloadImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UploadPluginPayload") + case "plugin": + + out.Values[i] = ec._UploadPluginPayload_plugin(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "scene": + + out.Values[i] = ec._UploadPluginPayload_scene(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "scenePlugin": + + out.Values[i] = ec._UploadPluginPayload_scenePlugin(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var userImplementors = []string{"User", "Node"} + +func (ec *executionContext) _User(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.User) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, 
sel, userImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("User") + case "id": + + out.Values[i] = ec._User_id(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "name": + + out.Values[i] = ec._User_name(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "email": + + out.Values[i] = ec._User_email(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var widgetAlignSystemImplementors = []string{"WidgetAlignSystem"} + +func (ec *executionContext) _WidgetAlignSystem(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetAlignSystem) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, widgetAlignSystemImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("WidgetAlignSystem") + case "inner": + + out.Values[i] = ec._WidgetAlignSystem_inner(ctx, field, obj) + + case "outer": + + out.Values[i] = ec._WidgetAlignSystem_outer(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var widgetAreaImplementors = []string{"WidgetArea"} + +func (ec *executionContext) _WidgetArea(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetArea) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, widgetAreaImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = 
graphql.MarshalString("WidgetArea") + case "widgetIds": + + out.Values[i] = ec._WidgetArea_widgetIds(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "align": + + out.Values[i] = ec._WidgetArea_align(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var widgetExtendableImplementors = []string{"WidgetExtendable"} + +func (ec *executionContext) _WidgetExtendable(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetExtendable) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, widgetExtendableImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("WidgetExtendable") + case "vertically": + + out.Values[i] = ec._WidgetExtendable_vertically(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "horizontally": + + out.Values[i] = ec._WidgetExtendable_horizontally(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var widgetLayoutImplementors = []string{"WidgetLayout"} + +func (ec *executionContext) _WidgetLayout(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetLayout) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, widgetLayoutImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("WidgetLayout") + case "extendable": + + out.Values[i] = ec._WidgetLayout_extendable(ctx, field, obj) + + if out.Values[i] == graphql.Null { + 
invalids++ + } + case "extended": + + out.Values[i] = ec._WidgetLayout_extended(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "floating": + + out.Values[i] = ec._WidgetLayout_floating(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "defaultLocation": + + out.Values[i] = ec._WidgetLayout_defaultLocation(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var widgetLocationImplementors = []string{"WidgetLocation"} + +func (ec *executionContext) _WidgetLocation(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetLocation) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, widgetLocationImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("WidgetLocation") + case "zone": + + out.Values[i] = ec._WidgetLocation_zone(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "section": + + out.Values[i] = ec._WidgetLocation_section(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "area": + + out.Values[i] = ec._WidgetLocation_area(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var widgetSectionImplementors = []string{"WidgetSection"} + +func (ec *executionContext) _WidgetSection(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetSection) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, widgetSectionImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": 
+ out.Values[i] = graphql.MarshalString("WidgetSection") + case "top": + + out.Values[i] = ec._WidgetSection_top(ctx, field, obj) + + case "middle": + + out.Values[i] = ec._WidgetSection_middle(ctx, field, obj) + + case "bottom": + + out.Values[i] = ec._WidgetSection_bottom(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var widgetZoneImplementors = []string{"WidgetZone"} + +func (ec *executionContext) _WidgetZone(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetZone) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, widgetZoneImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("WidgetZone") + case "left": + + out.Values[i] = ec._WidgetZone_left(ctx, field, obj) + + case "center": + + out.Values[i] = ec._WidgetZone_center(ctx, field, obj) + + case "right": + + out.Values[i] = ec._WidgetZone_right(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var __DirectiveImplementors = []string{"__Directive"} + +func (ec *executionContext) ___Directive(ctx context.Context, sel ast.SelectionSet, obj *introspection.Directive) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __DirectiveImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__Directive") + case "name": + + out.Values[i] = ec.___Directive_name(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "description": + + out.Values[i] = ec.___Directive_description(ctx, field, obj) + + case "locations": + + 
out.Values[i] = ec.___Directive_locations(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "args": + + out.Values[i] = ec.___Directive_args(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "isRepeatable": + + out.Values[i] = ec.___Directive_isRepeatable(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var __EnumValueImplementors = []string{"__EnumValue"} + +func (ec *executionContext) ___EnumValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.EnumValue) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __EnumValueImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__EnumValue") + case "name": + + out.Values[i] = ec.___EnumValue_name(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "description": + + out.Values[i] = ec.___EnumValue_description(ctx, field, obj) + + case "isDeprecated": + + out.Values[i] = ec.___EnumValue_isDeprecated(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "deprecationReason": + + out.Values[i] = ec.___EnumValue_deprecationReason(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var __FieldImplementors = []string{"__Field"} + +func (ec *executionContext) ___Field(ctx context.Context, sel ast.SelectionSet, obj *introspection.Field) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __FieldImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { 
+ case "__typename": + out.Values[i] = graphql.MarshalString("__Field") + case "name": + + out.Values[i] = ec.___Field_name(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "description": + + out.Values[i] = ec.___Field_description(ctx, field, obj) + + case "args": + + out.Values[i] = ec.___Field_args(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "type": + + out.Values[i] = ec.___Field_type(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "isDeprecated": + + out.Values[i] = ec.___Field_isDeprecated(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "deprecationReason": + + out.Values[i] = ec.___Field_deprecationReason(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var __InputValueImplementors = []string{"__InputValue"} + +func (ec *executionContext) ___InputValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.InputValue) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __InputValueImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__InputValue") + case "name": + + out.Values[i] = ec.___InputValue_name(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "description": + + out.Values[i] = ec.___InputValue_description(ctx, field, obj) + + case "type": + + out.Values[i] = ec.___InputValue_type(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "defaultValue": + + out.Values[i] = ec.___InputValue_defaultValue(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var 
__SchemaImplementors = []string{"__Schema"} + +func (ec *executionContext) ___Schema(ctx context.Context, sel ast.SelectionSet, obj *introspection.Schema) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __SchemaImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__Schema") + case "description": + + out.Values[i] = ec.___Schema_description(ctx, field, obj) + + case "types": + + out.Values[i] = ec.___Schema_types(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "queryType": + + out.Values[i] = ec.___Schema_queryType(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "mutationType": + + out.Values[i] = ec.___Schema_mutationType(ctx, field, obj) + + case "subscriptionType": + + out.Values[i] = ec.___Schema_subscriptionType(ctx, field, obj) + + case "directives": + + out.Values[i] = ec.___Schema_directives(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var __TypeImplementors = []string{"__Type"} + +func (ec *executionContext) ___Type(ctx context.Context, sel ast.SelectionSet, obj *introspection.Type) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __TypeImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__Type") + case "kind": + + out.Values[i] = ec.___Type_kind(ctx, field, obj) + + if out.Values[i] == graphql.Null { + invalids++ + } + case "name": + + out.Values[i] = ec.___Type_name(ctx, field, obj) + + case "description": + + out.Values[i] = ec.___Type_description(ctx, field, obj) + + case 
"fields": + + out.Values[i] = ec.___Type_fields(ctx, field, obj) + + case "interfaces": + + out.Values[i] = ec.___Type_interfaces(ctx, field, obj) + + case "possibleTypes": + + out.Values[i] = ec.___Type_possibleTypes(ctx, field, obj) + + case "enumValues": + + out.Values[i] = ec.___Type_enumValues(ctx, field, obj) + + case "inputFields": + + out.Values[i] = ec.___Type_inputFields(ctx, field, obj) + + case "ofType": + + out.Values[i] = ec.___Type_ofType(ctx, field, obj) + + case "specifiedByURL": + + out.Values[i] = ec.___Type_specifiedByURL(ctx, field, obj) + + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +// endregion **************************** object.gotpl **************************** + +// region ***************************** type.gotpl ***************************** + +func (ec *executionContext) unmarshalNAddClusterInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddClusterInput(ctx context.Context, v interface{}) (gqlmodel.AddClusterInput, error) { + res, err := ec.unmarshalInputAddClusterInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNAddDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDatasetSchemaInput(ctx context.Context, v interface{}) (gqlmodel.AddDatasetSchemaInput, error) { + res, err := ec.unmarshalInputAddDatasetSchemaInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNAddDynamicDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetInput(ctx context.Context, v interface{}) (gqlmodel.AddDynamicDatasetInput, error) { + res, err := ec.unmarshalInputAddDynamicDatasetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) 
unmarshalNAddDynamicDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetSchemaInput(ctx context.Context, v interface{}) (gqlmodel.AddDynamicDatasetSchemaInput, error) { + res, err := ec.unmarshalInputAddDynamicDatasetSchemaInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNAddInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddInfoboxFieldInput(ctx context.Context, v interface{}) (gqlmodel.AddInfoboxFieldInput, error) { + res, err := ec.unmarshalInputAddInfoboxFieldInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNAddLayerGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerGroupInput(ctx context.Context, v interface{}) (gqlmodel.AddLayerGroupInput, error) { + res, err := ec.unmarshalInputAddLayerGroupInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNAddLayerItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerItemInput(ctx context.Context, v interface{}) (gqlmodel.AddLayerItemInput, error) { + res, err := ec.unmarshalInputAddLayerItemInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNAddMemberToTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddMemberToTeamInput(ctx context.Context, v interface{}) (gqlmodel.AddMemberToTeamInput, error) { + res, err := ec.unmarshalInputAddMemberToTeamInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNAddPropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddPropertyItemInput(ctx context.Context, v interface{}) (gqlmodel.AddPropertyItemInput, error) { + res, err := 
ec.unmarshalInputAddPropertyItemInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNAddWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddWidgetInput(ctx context.Context, v interface{}) (gqlmodel.AddWidgetInput, error) { + res, err := ec.unmarshalInputAddWidgetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNAsset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Asset) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + return ret +} + +func (ec *executionContext) marshalNAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Asset) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._Asset(ctx, sel, v) +} + +func (ec *executionContext) marshalNAssetConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetConnection(ctx context.Context, sel ast.SelectionSet, v gqlmodel.AssetConnection) graphql.Marshaler { + return 
ec._AssetConnection(ctx, sel, &v) +} + +func (ec *executionContext) marshalNAssetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetConnection(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AssetConnection) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._AssetConnection(ctx, sel, v) +} + +func (ec *executionContext) marshalNAssetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.AssetEdge) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNAssetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetEdge(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNAssetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetEdge(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AssetEdge) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._AssetEdge(ctx, sel, v) +} + +func (ec *executionContext) 
unmarshalNAttachTagItemToGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagItemToGroupInput(ctx context.Context, v interface{}) (gqlmodel.AttachTagItemToGroupInput, error) { + res, err := ec.unmarshalInputAttachTagItemToGroupInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNAttachTagToLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagToLayerInput(ctx context.Context, v interface{}) (gqlmodel.AttachTagToLayerInput, error) { + res, err := ec.unmarshalInputAttachTagToLayerInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNBoolean2bool(ctx context.Context, v interface{}) (bool, error) { + res, err := graphql.UnmarshalBoolean(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNBoolean2bool(ctx context.Context, sel ast.SelectionSet, v bool) graphql.Marshaler { + res := graphql.MarshalBoolean(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) marshalNCluster2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšClusterแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Cluster) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = 
ec.marshalNCluster2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCluster(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNCluster2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCluster(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Cluster) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._Cluster(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNCreateAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateAssetInput(ctx context.Context, v interface{}) (gqlmodel.CreateAssetInput, error) { + res, err := ec.unmarshalInputCreateAssetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNCreateInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateInfoboxInput(ctx context.Context, v interface{}) (gqlmodel.CreateInfoboxInput, error) { + res, err := ec.unmarshalInputCreateInfoboxInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNCreateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateProjectInput(ctx context.Context, v interface{}) (gqlmodel.CreateProjectInput, error) { + res, err := ec.unmarshalInputCreateProjectInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNCreateSceneInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateSceneInput(ctx context.Context, v interface{}) (gqlmodel.CreateSceneInput, error) 
{ + res, err := ec.unmarshalInputCreateSceneInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNCreateTagGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagGroupInput(ctx context.Context, v interface{}) (gqlmodel.CreateTagGroupInput, error) { + res, err := ec.unmarshalInputCreateTagGroupInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNCreateTagItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagItemInput(ctx context.Context, v interface{}) (gqlmodel.CreateTagItemInput, error) { + res, err := ec.unmarshalInputCreateTagItemInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNCreateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTeamInput(ctx context.Context, v interface{}) (gqlmodel.CreateTeamInput, error) { + res, err := ec.unmarshalInputCreateTeamInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx context.Context, v interface{}) (usecase.Cursor, error) { + res, err := gqlmodel.UnmarshalCursor(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx context.Context, sel ast.SelectionSet, v usecase.Cursor) graphql.Marshaler { + res := gqlmodel.MarshalCursor(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx context.Context, sel 
ast.SelectionSet, v []*gqlmodel.Dataset) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + return ret +} + +func (ec *executionContext) marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Dataset) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNDataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNDataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Dataset) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, 
"the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._Dataset(ctx, sel, v) +} + +func (ec *executionContext) marshalNDatasetConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetConnection(ctx context.Context, sel ast.SelectionSet, v gqlmodel.DatasetConnection) graphql.Marshaler { + return ec._DatasetConnection(ctx, sel, &v) +} + +func (ec *executionContext) marshalNDatasetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetConnection(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetConnection) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._DatasetConnection(ctx, sel, v) +} + +func (ec *executionContext) marshalNDatasetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.DatasetEdge) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNDatasetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetEdge(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) 
marshalNDatasetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetEdge(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetEdge) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._DatasetEdge(ctx, sel, v) +} + +func (ec *executionContext) marshalNDatasetField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.DatasetField) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNDatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNDatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetField) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._DatasetField(ctx, sel, v) +} + +func (ec *executionContext) 
marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.DatasetSchema) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + return ret +} + +func (ec *executionContext) marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.DatasetSchema) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNDatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) 
marshalNDatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetSchema) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._DatasetSchema(ctx, sel, v) +} + +func (ec *executionContext) marshalNDatasetSchemaConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaConnection(ctx context.Context, sel ast.SelectionSet, v gqlmodel.DatasetSchemaConnection) graphql.Marshaler { + return ec._DatasetSchemaConnection(ctx, sel, &v) +} + +func (ec *executionContext) marshalNDatasetSchemaConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaConnection(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetSchemaConnection) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._DatasetSchemaConnection(ctx, sel, v) +} + +func (ec *executionContext) marshalNDatasetSchemaEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.DatasetSchemaEdge) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = 
ec.marshalNDatasetSchemaEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaEdge(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNDatasetSchemaEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaEdge(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetSchemaEdge) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._DatasetSchemaEdge(ctx, sel, v) +} + +func (ec *executionContext) marshalNDatasetSchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.DatasetSchemaField) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNDatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) 
marshalNDatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetSchemaField) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._DatasetSchemaField(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNDateTime2timeแšTime(ctx context.Context, v interface{}) (time.Time, error) { + res, err := graphql.UnmarshalTime(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNDateTime2timeแšTime(ctx context.Context, sel ast.SelectionSet, v time.Time) graphql.Marshaler { + res := graphql.MarshalTime(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) unmarshalNDeleteMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteMeInput(ctx context.Context, v interface{}) (gqlmodel.DeleteMeInput, error) { + res, err := ec.unmarshalInputDeleteMeInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNDeleteProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteProjectInput(ctx context.Context, v interface{}) (gqlmodel.DeleteProjectInput, error) { + res, err := ec.unmarshalInputDeleteProjectInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNDeleteTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteTeamInput(ctx context.Context, v interface{}) (gqlmodel.DeleteTeamInput, error) { + res, err := ec.unmarshalInputDeleteTeamInput(ctx, v) + return res, 
graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNDetachTagFromLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagFromLayerInput(ctx context.Context, v interface{}) (gqlmodel.DetachTagFromLayerInput, error) { + res, err := ec.unmarshalInputDetachTagFromLayerInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNDetachTagItemFromGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagItemFromGroupInput(ctx context.Context, v interface{}) (gqlmodel.DetachTagItemFromGroupInput, error) { + res, err := ec.unmarshalInputDetachTagItemFromGroupInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNFileSize2int64(ctx context.Context, v interface{}) (int64, error) { + res, err := graphql.UnmarshalInt64(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNFileSize2int64(ctx context.Context, sel ast.SelectionSet, v int64) graphql.Marshaler { + res := graphql.MarshalInt64(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) unmarshalNFloat2float64(ctx context.Context, v interface{}) (float64, error) { + res, err := graphql.UnmarshalFloatContext(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNFloat2float64(ctx context.Context, sel ast.SelectionSet, v float64) graphql.Marshaler { + res := graphql.MarshalFloatContext(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return graphql.WrapContextMarshaler(ctx, res) +} + +func (ec *executionContext) 
unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx context.Context, v interface{}) (gqlmodel.ID, error) { + tmp, err := graphql.UnmarshalString(v) + res := gqlmodel.ID(tmp) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx context.Context, sel ast.SelectionSet, v gqlmodel.ID) graphql.Marshaler { + res := graphql.MarshalString(string(v)) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) unmarshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx context.Context, v interface{}) ([]gqlmodel.ID, error) { + var vSlice []interface{} + if v != nil { + vSlice = graphql.CoerceList(v) + } + var err error + res := make([]gqlmodel.ID, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) marshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.ID) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + for i := range v { + ret[i] = ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, sel, v[i]) + } + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) 
unmarshalNImportDatasetFromGoogleSheetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetFromGoogleSheetInput(ctx context.Context, v interface{}) (gqlmodel.ImportDatasetFromGoogleSheetInput, error) { + res, err := ec.unmarshalInputImportDatasetFromGoogleSheetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNImportDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetInput(ctx context.Context, v interface{}) (gqlmodel.ImportDatasetInput, error) { + res, err := ec.unmarshalInputImportDatasetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNImportLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportLayerInput(ctx context.Context, v interface{}) (gqlmodel.ImportLayerInput, error) { + res, err := ec.unmarshalInputImportLayerInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNInfobox2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfobox(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Infobox) graphql.Marshaler { + return ec._Infobox(ctx, sel, &v) +} + +func (ec *executionContext) marshalNInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfobox(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Infobox) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._Infobox(ctx, sel, v) +} + +func (ec *executionContext) marshalNInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfoboxFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.InfoboxField) 
graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfoboxField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfoboxField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.InfoboxField) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._InfoboxField(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNInstallPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInstallPluginInput(ctx context.Context, v interface{}) (gqlmodel.InstallPluginInput, error) { + res, err := ec.unmarshalInputInstallPluginInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNInt2int(ctx context.Context, v interface{}) (int, error) { + res, err := graphql.UnmarshalInt(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNInt2int(ctx context.Context, sel ast.SelectionSet, v int) graphql.Marshaler { + res := graphql.MarshalInt(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, 
graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) unmarshalNLang2golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx context.Context, v interface{}) (language.Tag, error) { + res, err := gqlmodel.UnmarshalLang(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNLang2golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx context.Context, sel ast.SelectionSet, v language.Tag) graphql.Marshaler { + res := gqlmodel.MarshalLang(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Layer) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._Layer(ctx, sel, v) +} + +func (ec *executionContext) marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.Layer) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, sel, v[i]) + } + if 
isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + return ret +} + +func (ec *executionContext) marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerแš„(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.Layer) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) unmarshalNLayerEncodingFormat2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerEncodingFormat(ctx context.Context, v interface{}) (gqlmodel.LayerEncodingFormat, error) { + var res gqlmodel.LayerEncodingFormat + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNLayerEncodingFormat2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerEncodingFormat(ctx context.Context, sel ast.SelectionSet, v gqlmodel.LayerEncodingFormat) graphql.Marshaler { + return v +} + +func (ec *executionContext) marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.LayerGroup) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested 
element is null which the schema does not allow") + } + return graphql.Null + } + return ec._LayerGroup(ctx, sel, v) +} + +func (ec *executionContext) marshalNLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerItem(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.LayerItem) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._LayerItem(ctx, sel, v) +} + +func (ec *executionContext) marshalNLayerTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTag(ctx context.Context, sel ast.SelectionSet, v gqlmodel.LayerTag) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._LayerTag(ctx, sel, v) +} + +func (ec *executionContext) marshalNLayerTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagแš„(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.LayerTag) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNLayerTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTag(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret 
+} + +func (ec *executionContext) marshalNLayerTagItem2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagItemแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.LayerTagItem) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNLayerTagItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagItem(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNLayerTagItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagItem(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.LayerTagItem) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._LayerTagItem(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNLinkDatasetToPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLinkDatasetToPropertyValueInput(ctx context.Context, v interface{}) (gqlmodel.LinkDatasetToPropertyValueInput, error) { + res, err := ec.unmarshalInputLinkDatasetToPropertyValueInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) 
unmarshalNListOperation2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšListOperation(ctx context.Context, v interface{}) (gqlmodel.ListOperation, error) { + var res gqlmodel.ListOperation + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNListOperation2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšListOperation(ctx context.Context, sel ast.SelectionSet, v gqlmodel.ListOperation) graphql.Marshaler { + return v +} + +func (ec *executionContext) marshalNMe2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMe(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Me) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._Me(ctx, sel, v) +} + +func (ec *executionContext) marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.MergedInfoboxField) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + 
return ret +} + +func (ec *executionContext) marshalNMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedInfoboxField) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._MergedInfoboxField(ctx, sel, v) +} + +func (ec *executionContext) marshalNMergedPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.MergedPropertyField) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNMergedPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNMergedPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedPropertyField) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + 
return ec._MergedPropertyField(ctx, sel, v) +} + +func (ec *executionContext) marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.MergedPropertyGroup) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNMergedPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroup(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNMergedPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedPropertyGroup) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._MergedPropertyGroup(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNMoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveInfoboxFieldInput(ctx context.Context, v interface{}) (gqlmodel.MoveInfoboxFieldInput, error) { + res, err := ec.unmarshalInputMoveInfoboxFieldInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) 
unmarshalNMoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveLayerInput(ctx context.Context, v interface{}) (gqlmodel.MoveLayerInput, error) { + res, err := ec.unmarshalInputMoveLayerInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNMovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMovePropertyItemInput(ctx context.Context, v interface{}) (gqlmodel.MovePropertyItemInput, error) { + res, err := ec.unmarshalInputMovePropertyItemInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNNode2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNode(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.Node) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNode(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + return ret +} + +func (ec *executionContext) unmarshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNodeType(ctx context.Context, v interface{}) (gqlmodel.NodeType, error) { + var res gqlmodel.NodeType + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNodeType(ctx context.Context, sel 
ast.SelectionSet, v gqlmodel.NodeType) graphql.Marshaler { + return v +} + +func (ec *executionContext) marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PageInfo) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._PageInfo(ctx, sel, v) +} + +func (ec *executionContext) marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Plugin) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Plugin) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._Plugin(ctx, sel, v) +} + +func (ec *executionContext) 
marshalNPluginExtension2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PluginExtension) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PluginExtension) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._PluginExtension(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionType(ctx context.Context, v interface{}) (gqlmodel.PluginExtensionType, error) { + var res gqlmodel.PluginExtensionType + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionType(ctx context.Context, 
sel ast.SelectionSet, v gqlmodel.PluginExtensionType) graphql.Marshaler { + return v +} + +func (ec *executionContext) marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Project) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + return ret +} + +func (ec *executionContext) marshalNProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Project) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._Project(ctx, sel, v) +} + +func (ec *executionContext) marshalNProjectAliasAvailability2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectAliasAvailability(ctx context.Context, sel ast.SelectionSet, v gqlmodel.ProjectAliasAvailability) graphql.Marshaler { + return ec._ProjectAliasAvailability(ctx, sel, &v) +} + +func (ec *executionContext) marshalNProjectAliasAvailability2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectAliasAvailability(ctx context.Context, sel ast.SelectionSet, v 
*gqlmodel.ProjectAliasAvailability) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._ProjectAliasAvailability(ctx, sel, v) +} + +func (ec *executionContext) marshalNProjectConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectConnection(ctx context.Context, sel ast.SelectionSet, v gqlmodel.ProjectConnection) graphql.Marshaler { + return ec._ProjectConnection(ctx, sel, &v) +} + +func (ec *executionContext) marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectConnection(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ProjectConnection) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._ProjectConnection(ctx, sel, v) +} + +func (ec *executionContext) marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.ProjectEdge) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNProjectEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectEdge(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := 
range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNProjectEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectEdge(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ProjectEdge) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._ProjectEdge(ctx, sel, v) +} + +func (ec *executionContext) marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Property) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._Property(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertyField) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + 
return ret +} + +func (ec *executionContext) marshalNPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyField) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._PropertyField(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertyFieldLink2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldLink(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyFieldLink) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._PropertyFieldLink(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertyGroup) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec 
*executionContext) marshalNPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyGroup) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._PropertyGroup(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItem(ctx context.Context, sel ast.SelectionSet, v gqlmodel.PropertyItem) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._PropertyItem(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemแš„(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.PropertyItem) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItem(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) 
marshalNPropertyLinkableFields2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyLinkableFields(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyLinkableFields) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._PropertyLinkableFields(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertySchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertySchema) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchema) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._PropertySchema(ctx, sel, v) +} + +func (ec *executionContext) 
marshalNPropertySchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertySchemaField) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaField) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._PropertySchemaField(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertySchemaFieldChoice2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldChoice(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaFieldChoice) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return 
ec._PropertySchemaFieldChoice(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertySchemaGroup) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaGroup) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._PropertySchemaGroup(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNPublishProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPublishProjectInput(ctx context.Context, v interface{}) (gqlmodel.PublishProjectInput, error) { + res, err := ec.unmarshalInputPublishProjectInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) 
unmarshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPublishmentStatus(ctx context.Context, v interface{}) (gqlmodel.PublishmentStatus, error) { + var res gqlmodel.PublishmentStatus + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPublishmentStatus(ctx context.Context, sel ast.SelectionSet, v gqlmodel.PublishmentStatus) graphql.Marshaler { + return v +} + +func (ec *executionContext) unmarshalNRemoveAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveAssetInput(ctx context.Context, v interface{}) (gqlmodel.RemoveAssetInput, error) { + res, err := ec.unmarshalInputRemoveAssetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNRemoveClusterInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveClusterInput(ctx context.Context, v interface{}) (gqlmodel.RemoveClusterInput, error) { + res, err := ec.unmarshalInputRemoveClusterInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNRemoveDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveDatasetSchemaInput(ctx context.Context, v interface{}) (gqlmodel.RemoveDatasetSchemaInput, error) { + res, err := ec.unmarshalInputRemoveDatasetSchemaInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNRemoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxFieldInput(ctx context.Context, v interface{}) (gqlmodel.RemoveInfoboxFieldInput, error) { + res, err := ec.unmarshalInputRemoveInfoboxFieldInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec 
*executionContext) unmarshalNRemoveInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxInput(ctx context.Context, v interface{}) (gqlmodel.RemoveInfoboxInput, error) { + res, err := ec.unmarshalInputRemoveInfoboxInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNRemoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveLayerInput(ctx context.Context, v interface{}) (gqlmodel.RemoveLayerInput, error) { + res, err := ec.unmarshalInputRemoveLayerInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNRemoveMemberFromTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveMemberFromTeamInput(ctx context.Context, v interface{}) (gqlmodel.RemoveMemberFromTeamInput, error) { + res, err := ec.unmarshalInputRemoveMemberFromTeamInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNRemoveMyAuthInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveMyAuthInput(ctx context.Context, v interface{}) (gqlmodel.RemoveMyAuthInput, error) { + res, err := ec.unmarshalInputRemoveMyAuthInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNRemovePropertyFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemovePropertyFieldInput(ctx context.Context, v interface{}) (gqlmodel.RemovePropertyFieldInput, error) { + res, err := ec.unmarshalInputRemovePropertyFieldInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNRemovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemovePropertyItemInput(ctx context.Context, v interface{}) (gqlmodel.RemovePropertyItemInput, error) { + res, 
err := ec.unmarshalInputRemovePropertyItemInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNRemoveTagInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveTagInput(ctx context.Context, v interface{}) (gqlmodel.RemoveTagInput, error) { + res, err := ec.unmarshalInputRemoveTagInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNRemoveWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveWidgetInput(ctx context.Context, v interface{}) (gqlmodel.RemoveWidgetInput, error) { + res, err := ec.unmarshalInputRemoveWidgetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRole(ctx context.Context, v interface{}) (gqlmodel.Role, error) { + var res gqlmodel.Role + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRole(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Role) graphql.Marshaler { + return v +} + +func (ec *executionContext) marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Scene) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._Scene(ctx, sel, v) +} + +func (ec *executionContext) marshalNScenePlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePluginแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.ScenePlugin) graphql.Marshaler { + ret := 
make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ScenePlugin) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._ScenePlugin(ctx, sel, v) +} + +func (ec *executionContext) marshalNSceneWidget2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidgetแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.SceneWidget) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = 
ec.marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidget(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidget(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.SceneWidget) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._SceneWidget(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNSignupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSignupInput(ctx context.Context, v interface{}) (gqlmodel.SignupInput, error) { + res, err := ec.unmarshalInputSignupInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNString2string(ctx context.Context, v interface{}) (string, error) { + res, err := graphql.UnmarshalString(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNString2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { + res := graphql.MarshalString(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) unmarshalNString2แš•stringแš„(ctx context.Context, v interface{}) ([]string, error) { + var vSlice []interface{} + if v != nil { + vSlice = graphql.CoerceList(v) + } + var err error + res := make([]string, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], 
err = ec.unmarshalNString2string(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) marshalNString2แš•stringแš„(ctx context.Context, sel ast.SelectionSet, v []string) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + for i := range v { + ret[i] = ec.marshalNString2string(ctx, sel, v[i]) + } + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) unmarshalNSyncDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSyncDatasetInput(ctx context.Context, v interface{}) (gqlmodel.SyncDatasetInput, error) { + res, err := ec.unmarshalInputSyncDatasetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Tag) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._Tag(ctx, sel, v) +} + +func (ec *executionContext) marshalNTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagแš„(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.Tag) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx, sel, 
v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.TagGroup) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._TagGroup(ctx, sel, v) +} + +func (ec *executionContext) marshalNTagItem2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagItemแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.TagItem) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNTagItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagItem(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNTagItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagItem(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.TagItem) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + 
return graphql.Null + } + return ec._TagItem(ctx, sel, v) +} + +func (ec *executionContext) marshalNTeam2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Team) graphql.Marshaler { + return ec._Team(ctx, sel, &v) +} + +func (ec *executionContext) marshalNTeam2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Team) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Team) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._Team(ctx, sel, v) +} + +func (ec *executionContext) marshalNTeamMember2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamMemberแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.TeamMember) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg 
sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNTeamMember2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamMember(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNTeamMember2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamMember(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.TeamMember) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._TeamMember(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNTheme2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx context.Context, v interface{}) (gqlmodel.Theme, error) { + var res gqlmodel.Theme + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNTheme2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Theme) graphql.Marshaler { + return v +} + +func (ec *executionContext) unmarshalNUninstallPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUninstallPluginInput(ctx context.Context, v interface{}) (gqlmodel.UninstallPluginInput, error) { + res, err := ec.unmarshalInputUninstallPluginInput(ctx, v) + return 
res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUnlinkPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUnlinkPropertyValueInput(ctx context.Context, v interface{}) (gqlmodel.UnlinkPropertyValueInput, error) { + res, err := ec.unmarshalInputUnlinkPropertyValueInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdateClusterInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateClusterInput(ctx context.Context, v interface{}) (gqlmodel.UpdateClusterInput, error) { + res, err := ec.unmarshalInputUpdateClusterInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdateDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateDatasetSchemaInput(ctx context.Context, v interface{}) (gqlmodel.UpdateDatasetSchemaInput, error) { + res, err := ec.unmarshalInputUpdateDatasetSchemaInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdateLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateLayerInput(ctx context.Context, v interface{}) (gqlmodel.UpdateLayerInput, error) { + res, err := ec.unmarshalInputUpdateLayerInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdateMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMeInput(ctx context.Context, v interface{}) (gqlmodel.UpdateMeInput, error) { + res, err := ec.unmarshalInputUpdateMeInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdateMemberOfTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMemberOfTeamInput(ctx context.Context, v interface{}) 
(gqlmodel.UpdateMemberOfTeamInput, error) { + res, err := ec.unmarshalInputUpdateMemberOfTeamInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateProjectInput(ctx context.Context, v interface{}) (gqlmodel.UpdateProjectInput, error) { + res, err := ec.unmarshalInputUpdateProjectInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdatePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyItemInput(ctx context.Context, v interface{}) (gqlmodel.UpdatePropertyItemInput, error) { + res, err := ec.unmarshalInputUpdatePropertyItemInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdatePropertyItemOperationInput2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyItemOperationInputแš„(ctx context.Context, v interface{}) ([]*gqlmodel.UpdatePropertyItemOperationInput, error) { + var vSlice []interface{} + if v != nil { + vSlice = graphql.CoerceList(v) + } + var err error + res := make([]*gqlmodel.UpdatePropertyItemOperationInput, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNUpdatePropertyItemOperationInput2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyItemOperationInput(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) unmarshalNUpdatePropertyItemOperationInput2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyItemOperationInput(ctx context.Context, v interface{}) (*gqlmodel.UpdatePropertyItemOperationInput, error) { + res, err := 
ec.unmarshalInputUpdatePropertyItemOperationInput(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdatePropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyValueInput(ctx context.Context, v interface{}) (gqlmodel.UpdatePropertyValueInput, error) { + res, err := ec.unmarshalInputUpdatePropertyValueInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdateTagInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTagInput(ctx context.Context, v interface{}) (gqlmodel.UpdateTagInput, error) { + res, err := ec.unmarshalInputUpdateTagInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTeamInput(ctx context.Context, v interface{}) (gqlmodel.UpdateTeamInput, error) { + res, err := ec.unmarshalInputUpdateTeamInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdateWidgetAlignSystemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetAlignSystemInput(ctx context.Context, v interface{}) (gqlmodel.UpdateWidgetAlignSystemInput, error) { + res, err := ec.unmarshalInputUpdateWidgetAlignSystemInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdateWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetInput(ctx context.Context, v interface{}) (gqlmodel.UpdateWidgetInput, error) { + res, err := ec.unmarshalInputUpdateWidgetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) 
unmarshalNUpgradePluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpgradePluginInput(ctx context.Context, v interface{}) (gqlmodel.UpgradePluginInput, error) { + res, err := ec.unmarshalInputUpgradePluginInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx context.Context, v interface{}) (graphql.Upload, error) { + res, err := graphql.UnmarshalUpload(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx context.Context, sel ast.SelectionSet, v graphql.Upload) graphql.Marshaler { + res := graphql.MarshalUpload(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) unmarshalNUploadFileToPropertyInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUploadFileToPropertyInput(ctx context.Context, v interface{}) (gqlmodel.UploadFileToPropertyInput, error) { + res, err := ec.unmarshalInputUploadFileToPropertyInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUploadPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUploadPluginInput(ctx context.Context, v interface{}) (gqlmodel.UploadPluginInput, error) { + res, err := ec.unmarshalInputUploadPluginInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.User) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, 
"the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._User(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx context.Context, v interface{}) (gqlmodel.ValueType, error) { + var res gqlmodel.ValueType + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx context.Context, sel ast.SelectionSet, v gqlmodel.ValueType) graphql.Marshaler { + return v +} + +func (ec *executionContext) unmarshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx context.Context, v interface{}) (gqlmodel.Visualizer, error) { + var res gqlmodel.Visualizer + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Visualizer) graphql.Marshaler { + return v +} + +func (ec *executionContext) unmarshalNWidgetAreaAlign2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaAlign(ctx context.Context, v interface{}) (gqlmodel.WidgetAreaAlign, error) { + var res gqlmodel.WidgetAreaAlign + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNWidgetAreaAlign2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaAlign(ctx context.Context, sel ast.SelectionSet, v gqlmodel.WidgetAreaAlign) graphql.Marshaler { + return v +} + +func (ec *executionContext) 
unmarshalNWidgetAreaType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaType(ctx context.Context, v interface{}) (gqlmodel.WidgetAreaType, error) { + var res gqlmodel.WidgetAreaType + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNWidgetAreaType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaType(ctx context.Context, sel ast.SelectionSet, v gqlmodel.WidgetAreaType) graphql.Marshaler { + return v +} + +func (ec *executionContext) marshalNWidgetExtendable2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetExtendable(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetExtendable) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._WidgetExtendable(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNWidgetLocationInput2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLocationInput(ctx context.Context, v interface{}) (*gqlmodel.WidgetLocationInput, error) { + res, err := ec.unmarshalInputWidgetLocationInput(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNWidgetSectionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSectionType(ctx context.Context, v interface{}) (gqlmodel.WidgetSectionType, error) { + var res gqlmodel.WidgetSectionType + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNWidgetSectionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSectionType(ctx context.Context, sel ast.SelectionSet, v gqlmodel.WidgetSectionType) graphql.Marshaler { 
+ return v +} + +func (ec *executionContext) unmarshalNWidgetZoneType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZoneType(ctx context.Context, v interface{}) (gqlmodel.WidgetZoneType, error) { + var res gqlmodel.WidgetZoneType + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNWidgetZoneType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZoneType(ctx context.Context, sel ast.SelectionSet, v gqlmodel.WidgetZoneType) graphql.Marshaler { + return v +} + +func (ec *executionContext) marshalN__Directive2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšDirective(ctx context.Context, sel ast.SelectionSet, v introspection.Directive) graphql.Marshaler { + return ec.___Directive(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__Directive2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšDirectiveแš„(ctx context.Context, sel ast.SelectionSet, v []introspection.Directive) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__Directive2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšDirective(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) unmarshalN__DirectiveLocation2string(ctx context.Context, v interface{}) (string, error) { + res, err := graphql.UnmarshalString(v) + return res, graphql.ErrorOnPath(ctx, 
err) +} + +func (ec *executionContext) marshalN__DirectiveLocation2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { + res := graphql.MarshalString(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) unmarshalN__DirectiveLocation2แš•stringแš„(ctx context.Context, v interface{}) ([]string, error) { + var vSlice []interface{} + if v != nil { + vSlice = graphql.CoerceList(v) + } + var err error + res := make([]string, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalN__DirectiveLocation2string(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) marshalN__DirectiveLocation2แš•stringแš„(ctx context.Context, sel ast.SelectionSet, v []string) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__DirectiveLocation2string(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalN__EnumValue2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšEnumValue(ctx context.Context, sel ast.SelectionSet, v introspection.EnumValue) graphql.Marshaler { + return ec.___EnumValue(ctx, sel, &v) +} + +func (ec *executionContext) 
marshalN__Field2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšField(ctx context.Context, sel ast.SelectionSet, v introspection.Field) graphql.Marshaler { + return ec.___Field(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__InputValue2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValue(ctx context.Context, sel ast.SelectionSet, v introspection.InputValue) graphql.Marshaler { + return ec.___InputValue(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx context.Context, sel ast.SelectionSet, v []introspection.InputValue) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__InputValue2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValue(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalN__Type2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx context.Context, sel ast.SelectionSet, v introspection.Type) graphql.Marshaler { + return ec.___Type(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__Type2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšTypeแš„(ctx context.Context, sel ast.SelectionSet, v []introspection.Type) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i 
+ fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__Type2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalN__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx context.Context, sel ast.SelectionSet, v *introspection.Type) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec.___Type(ctx, sel, v) +} + +func (ec *executionContext) unmarshalN__TypeKind2string(ctx context.Context, v interface{}) (string, error) { + res, err := graphql.UnmarshalString(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalN__TypeKind2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { + res := graphql.MarshalString(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) marshalOAddClusterPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddClusterPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddClusterPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddClusterPayload(ctx, sel, v) +} + +func (ec *executionContext) 
marshalOAddDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddDatasetSchemaPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddDatasetSchemaPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOAddDynamicDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddDynamicDatasetPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddDynamicDatasetPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOAddDynamicDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddDynamicDatasetSchemaPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddDynamicDatasetSchemaPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOAddInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddInfoboxFieldPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddInfoboxFieldPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOAddLayerGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerGroupPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddLayerGroupPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddLayerGroupPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOAddLayerItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerItemPayload(ctx 
context.Context, sel ast.SelectionSet, v *gqlmodel.AddLayerItemPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddLayerItemPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOAddMemberToTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddMemberToTeamPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddMemberToTeamPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddMemberToTeamPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOAddWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddWidgetPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddWidgetPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOAny2interface(ctx context.Context, v interface{}) (interface{}, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalAny(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOAny2interface(ctx context.Context, sel ast.SelectionSet, v interface{}) graphql.Marshaler { + if v == nil { + return graphql.Null + } + res := graphql.MarshalAny(v) + return res +} + +func (ec *executionContext) marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Asset) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Asset(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOAssetSortType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetSortType(ctx context.Context, v interface{}) (*gqlmodel.AssetSortType, error) { + if v == nil { + return nil, nil + } + var res = new(gqlmodel.AssetSortType) + err := res.UnmarshalGQL(v) + return res, 
graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOAssetSortType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetSortType(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AssetSortType) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return v +} + +func (ec *executionContext) marshalOAttachTagItemToGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagItemToGroupPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AttachTagItemToGroupPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AttachTagItemToGroupPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOAttachTagToLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagToLayerPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AttachTagToLayerPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AttachTagToLayerPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOBoolean2bool(ctx context.Context, v interface{}) (bool, error) { + res, err := graphql.UnmarshalBoolean(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOBoolean2bool(ctx context.Context, sel ast.SelectionSet, v bool) graphql.Marshaler { + res := graphql.MarshalBoolean(v) + return res +} + +func (ec *executionContext) unmarshalOBoolean2แš–bool(ctx context.Context, v interface{}) (*bool, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalBoolean(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOBoolean2แš–bool(ctx context.Context, sel ast.SelectionSet, v *bool) graphql.Marshaler { + if v == nil { + return graphql.Null + } + res := graphql.MarshalBoolean(*v) + return res +} + +func (ec *executionContext) 
marshalOCreateAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateAssetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.CreateAssetPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._CreateAssetPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOCreateInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateInfoboxPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.CreateInfoboxPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._CreateInfoboxPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOCreateScenePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateScenePayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.CreateScenePayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._CreateScenePayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOCreateTagGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagGroupPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.CreateTagGroupPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._CreateTagGroupPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOCreateTagItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagItemPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.CreateTagItemPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._CreateTagItemPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOCreateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTeamPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.CreateTeamPayload) graphql.Marshaler { + if v 
== nil { + return graphql.Null + } + return ec._CreateTeamPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx context.Context, v interface{}) (*usecase.Cursor, error) { + if v == nil { + return nil, nil + } + res, err := gqlmodel.UnmarshalCursor(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx context.Context, sel ast.SelectionSet, v *usecase.Cursor) graphql.Marshaler { + if v == nil { + return graphql.Null + } + res := gqlmodel.MarshalCursor(*v) + return res +} + +func (ec *executionContext) marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Dataset) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Dataset(ctx, sel, v) +} + +func (ec *executionContext) marshalODatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetField) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._DatasetField(ctx, sel, v) +} + +func (ec *executionContext) marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetSchema) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._DatasetSchema(ctx, sel, v) +} + +func (ec *executionContext) marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetSchemaField) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._DatasetSchemaField(ctx, sel, v) +} 
+ +func (ec *executionContext) unmarshalODateTime2แš–timeแšTime(ctx context.Context, v interface{}) (*time.Time, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalTime(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalODateTime2แš–timeแšTime(ctx context.Context, sel ast.SelectionSet, v *time.Time) graphql.Marshaler { + if v == nil { + return graphql.Null + } + res := graphql.MarshalTime(*v) + return res +} + +func (ec *executionContext) marshalODeleteMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteMePayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DeleteMePayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._DeleteMePayload(ctx, sel, v) +} + +func (ec *executionContext) marshalODeleteProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteProjectPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DeleteProjectPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._DeleteProjectPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalODeleteTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteTeamPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DeleteTeamPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._DeleteTeamPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalODetachTagFromLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagFromLayerPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DetachTagFromLayerPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._DetachTagFromLayerPayload(ctx, sel, v) +} + +func (ec *executionContext) 
marshalODetachTagItemFromGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagItemFromGroupPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DetachTagItemFromGroupPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._DetachTagItemFromGroupPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOFloat2แš–float64(ctx context.Context, v interface{}) (*float64, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalFloatContext(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOFloat2แš–float64(ctx context.Context, sel ast.SelectionSet, v *float64) graphql.Marshaler { + if v == nil { + return graphql.Null + } + res := graphql.MarshalFloatContext(*v) + return graphql.WrapContextMarshaler(ctx, res) +} + +func (ec *executionContext) unmarshalOID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx context.Context, v interface{}) ([]gqlmodel.ID, error) { + if v == nil { + return nil, nil + } + var vSlice []interface{} + if v != nil { + vSlice = graphql.CoerceList(v) + } + var err error + res := make([]gqlmodel.ID, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) marshalOID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.ID) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + for i := range v { + ret[i] = ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, sel, v[i]) + } + + for _, 
e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx context.Context, v interface{}) (*gqlmodel.ID, error) { + if v == nil { + return nil, nil + } + tmp, err := graphql.UnmarshalString(v) + res := gqlmodel.ID(tmp) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ID) graphql.Marshaler { + if v == nil { + return graphql.Null + } + res := graphql.MarshalString(string(*v)) + return res +} + +func (ec *executionContext) marshalOImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ImportDatasetPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._ImportDatasetPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOImportLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportLayerPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ImportLayerPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._ImportLayerPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfobox(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Infobox) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Infobox(ctx, sel, v) +} + +func (ec *executionContext) marshalOInstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.InstallPluginPayload) 
graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._InstallPluginPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOInt2แš–int(ctx context.Context, v interface{}) (*int, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalInt(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOInt2แš–int(ctx context.Context, sel ast.SelectionSet, v *int) graphql.Marshaler { + if v == nil { + return graphql.Null + } + res := graphql.MarshalInt(*v) + return res +} + +func (ec *executionContext) unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx context.Context, v interface{}) (*language.Tag, error) { + if v == nil { + return nil, nil + } + res, err := gqlmodel.UnmarshalLang(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx context.Context, sel ast.SelectionSet, v *language.Tag) graphql.Marshaler { + if v == nil { + return graphql.Null + } + res := gqlmodel.MarshalLang(*v) + return res +} + +func (ec *executionContext) marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Layer) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Layer(ctx, sel, v) +} + +func (ec *executionContext) marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.LayerGroup) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._LayerGroup(ctx, sel, v) +} + +func (ec *executionContext) marshalOLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerItem(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.LayerItem) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._LayerItem(ctx, 
sel, v) +} + +func (ec *executionContext) marshalOMe2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMe(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Me) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Me(ctx, sel, v) +} + +func (ec *executionContext) marshalOMergedInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfobox(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedInfobox) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._MergedInfobox(ctx, sel, v) +} + +func (ec *executionContext) marshalOMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedInfoboxField) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._MergedInfoboxField(ctx, sel, v) +} + +func (ec *executionContext) marshalOMergedLayer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedLayer(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedLayer) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._MergedLayer(ctx, sel, v) +} + +func (ec *executionContext) marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedProperty) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._MergedProperty(ctx, sel, v) +} + +func (ec *executionContext) marshalOMoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MoveInfoboxFieldPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._MoveInfoboxFieldPayload(ctx, sel, v) +} + +func 
(ec *executionContext) marshalOMoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveLayerPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MoveLayerPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._MoveLayerPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNode(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Node) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Node(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOPagination2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPagination(ctx context.Context, v interface{}) (*gqlmodel.Pagination, error) { + if v == nil { + return nil, nil + } + res, err := ec.unmarshalInputPagination(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Plugin) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Plugin(ctx, sel, v) +} + +func (ec *executionContext) marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PluginExtension) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PluginExtension(ctx, sel, v) +} + +func (ec *executionContext) marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Project) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Project(ctx, sel, v) +} + +func (ec *executionContext) 
marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ProjectPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._ProjectPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Property) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Property(ctx, sel, v) +} + +func (ec *executionContext) marshalOPropertyCondition2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyCondition(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyCondition) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertyCondition(ctx, sel, v) +} + +func (ec *executionContext) marshalOPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyField) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertyField(ctx, sel, v) +} + +func (ec *executionContext) marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldLinkแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertyFieldLink) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer 
wg.Done() + } + ret[i] = ec.marshalNPropertyFieldLink2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldLink(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyFieldPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertyFieldPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyGroup) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertyGroup(ctx, sel, v) +} + +func (ec *executionContext) marshalOPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItem(ctx context.Context, sel ast.SelectionSet, v gqlmodel.PropertyItem) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertyItem(ctx, sel, v) +} + +func (ec *executionContext) marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyItemPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertyItemPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchema) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return 
ec._PropertySchema(ctx, sel, v) +} + +func (ec *executionContext) marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaField) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertySchemaField(ctx, sel, v) +} + +func (ec *executionContext) marshalOPropertySchemaFieldChoice2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldChoiceแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertySchemaFieldChoice) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPropertySchemaFieldChoice2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldChoice(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) unmarshalOPropertySchemaFieldUI2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldUI(ctx context.Context, v interface{}) (*gqlmodel.PropertySchemaFieldUI, error) { + if v == nil { + return nil, nil + } + var res = new(gqlmodel.PropertySchemaFieldUI) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) 
marshalOPropertySchemaFieldUI2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldUI(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaFieldUI) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return v +} + +func (ec *executionContext) marshalOPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaGroup) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertySchemaGroup(ctx, sel, v) +} + +func (ec *executionContext) marshalORemoveAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveAssetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveAssetPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._RemoveAssetPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalORemoveClusterPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveClusterPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveClusterPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._RemoveClusterPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalORemoveDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveDatasetSchemaPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._RemoveDatasetSchemaPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalORemoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveInfoboxFieldPayload) 
graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._RemoveInfoboxFieldPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalORemoveInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveInfoboxPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._RemoveInfoboxPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalORemoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveLayerPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveLayerPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._RemoveLayerPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalORemoveMemberFromTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveMemberFromTeamPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveMemberFromTeamPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._RemoveMemberFromTeamPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalORemoveTagPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveTagPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveTagPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._RemoveTagPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalORemoveWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveWidgetPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._RemoveWidgetPayload(ctx, sel, v) +} + +func (ec *executionContext) 
marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Scene) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Scene(ctx, sel, v) +} + +func (ec *executionContext) marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ScenePlugin) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._ScenePlugin(ctx, sel, v) +} + +func (ec *executionContext) marshalOSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidget(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.SceneWidget) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._SceneWidget(ctx, sel, v) +} + +func (ec *executionContext) marshalOSignupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSignupPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.SignupPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._SignupPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOString2แš•stringแš„(ctx context.Context, v interface{}) ([]string, error) { + if v == nil { + return nil, nil + } + var vSlice []interface{} + if v != nil { + vSlice = graphql.CoerceList(v) + } + var err error + res := make([]string, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNString2string(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) marshalOString2แš•stringแš„(ctx context.Context, sel ast.SelectionSet, v []string) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + for i := range v { + ret[i] = 
ec.marshalNString2string(ctx, sel, v[i]) + } + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) unmarshalOString2แš–string(ctx context.Context, v interface{}) (*string, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalString(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOString2แš–string(ctx context.Context, sel ast.SelectionSet, v *string) graphql.Marshaler { + if v == nil { + return graphql.Null + } + res := graphql.MarshalString(*v) + return res +} + +func (ec *executionContext) marshalOSyncDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSyncDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.SyncDatasetPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._SyncDatasetPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Tag) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Tag(ctx, sel, v) +} + +func (ec *executionContext) marshalOTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.TagGroup) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._TagGroup(ctx, sel, v) +} + +func (ec *executionContext) marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Team) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Team(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOTextAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTextAlign(ctx 
context.Context, v interface{}) (*gqlmodel.TextAlign, error) { + if v == nil { + return nil, nil + } + var res = new(gqlmodel.TextAlign) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOTextAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTextAlign(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.TextAlign) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return v +} + +func (ec *executionContext) unmarshalOTheme2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx context.Context, v interface{}) (*gqlmodel.Theme, error) { + if v == nil { + return nil, nil + } + var res = new(gqlmodel.Theme) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOTheme2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Theme) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return v +} + +func (ec *executionContext) unmarshalOTranslatedString2map(ctx context.Context, v interface{}) (map[string]string, error) { + if v == nil { + return nil, nil + } + res, err := gqlmodel.UnmarshalMap(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOTranslatedString2map(ctx context.Context, sel ast.SelectionSet, v map[string]string) graphql.Marshaler { + if v == nil { + return graphql.Null + } + res := gqlmodel.MarshalMap(v) + return res +} + +func (ec *executionContext) unmarshalOURL2แš–netแš‹urlแšURL(ctx context.Context, v interface{}) (*url.URL, error) { + if v == nil { + return nil, nil + } + res, err := gqlmodel.UnmarshalURL(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOURL2แš–netแš‹urlแšURL(ctx context.Context, sel ast.SelectionSet, v *url.URL) graphql.Marshaler { + if v == 
nil { + return graphql.Null + } + res := gqlmodel.MarshalURL(*v) + return res +} + +func (ec *executionContext) marshalOUninstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUninstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UninstallPluginPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UninstallPluginPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUpdateClusterPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateClusterPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateClusterPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateClusterPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUpdateDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateDatasetSchemaPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateDatasetSchemaPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUpdateLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateLayerPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateLayerPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateLayerPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMePayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateMePayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateMePayload(ctx, sel, v) +} + +func (ec *executionContext) 
marshalOUpdateMemberOfTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMemberOfTeamPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateMemberOfTeamPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateMemberOfTeamPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUpdateTagPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTagPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateTagPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateTagPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUpdateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTeamPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateTeamPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateTeamPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUpdateWidgetAlignSystemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetAlignSystemPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateWidgetAlignSystemPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateWidgetAlignSystemPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUpdateWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateWidgetPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateWidgetPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUpgradePluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpgradePluginPayload(ctx context.Context, sel ast.SelectionSet, v 
*gqlmodel.UpgradePluginPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpgradePluginPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOUpload2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx context.Context, v interface{}) (*graphql.Upload, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalUpload(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOUpload2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx context.Context, sel ast.SelectionSet, v *graphql.Upload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + res := graphql.MarshalUpload(*v) + return res +} + +func (ec *executionContext) marshalOUploadPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUploadPluginPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UploadPluginPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UploadPluginPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.User) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._User(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOValueType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx context.Context, v interface{}) (*gqlmodel.ValueType, error) { + if v == nil { + return nil, nil + } + var res = new(gqlmodel.ValueType) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOValueType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ValueType) graphql.Marshaler { + if v == nil { + return graphql.Null + } + 
return v +} + +func (ec *executionContext) unmarshalOVisualizer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx context.Context, v interface{}) (*gqlmodel.Visualizer, error) { + if v == nil { + return nil, nil + } + var res = new(gqlmodel.Visualizer) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOVisualizer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Visualizer) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return v +} + +func (ec *executionContext) marshalOWidgetAlignSystem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAlignSystem(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetAlignSystem) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._WidgetAlignSystem(ctx, sel, v) +} + +func (ec *executionContext) marshalOWidgetArea2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetArea(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetArea) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._WidgetArea(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOWidgetAreaAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaAlign(ctx context.Context, v interface{}) (*gqlmodel.WidgetAreaAlign, error) { + if v == nil { + return nil, nil + } + var res = new(gqlmodel.WidgetAreaAlign) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOWidgetAreaAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaAlign(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetAreaAlign) graphql.Marshaler { + if v == nil { + return 
graphql.Null + } + return v +} + +func (ec *executionContext) marshalOWidgetLayout2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLayout(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetLayout) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._WidgetLayout(ctx, sel, v) +} + +func (ec *executionContext) marshalOWidgetLocation2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLocation(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetLocation) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._WidgetLocation(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOWidgetLocationInput2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLocationInput(ctx context.Context, v interface{}) (*gqlmodel.WidgetLocationInput, error) { + if v == nil { + return nil, nil + } + res, err := ec.unmarshalInputWidgetLocationInput(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOWidgetSection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSection(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetSection) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._WidgetSection(ctx, sel, v) +} + +func (ec *executionContext) marshalOWidgetZone2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZone(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetZone) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._WidgetZone(ctx, sel, v) +} + +func (ec *executionContext) marshalO__EnumValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšEnumValueแš„(ctx context.Context, sel ast.SelectionSet, v []introspection.EnumValue) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := 
make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__EnumValue2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšEnumValue(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalO__Field2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšFieldแš„(ctx context.Context, sel ast.SelectionSet, v []introspection.Field) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__Field2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalO__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx context.Context, sel ast.SelectionSet, v []introspection.InputValue) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 
:= len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__InputValue2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValue(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalO__Schema2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšSchema(ctx context.Context, sel ast.SelectionSet, v *introspection.Schema) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec.___Schema(ctx, sel, v) +} + +func (ec *executionContext) marshalO__Type2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšTypeแš„(ctx context.Context, sel ast.SelectionSet, v []introspection.Type) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__Type2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) 
marshalO__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx context.Context, sel ast.SelectionSet, v *introspection.Type) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec.___Type(ctx, sel, v) +} + +// endregion ***************************** type.gotpl ***************************** diff --git a/server/internal/adapter/gql/gqldataloader/assetloader_gen.go b/server/internal/adapter/gql/gqldataloader/assetloader_gen.go new file mode 100644 index 000000000..556880b44 --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/assetloader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. + +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +// AssetLoaderConfig captures the config to create a new AssetLoader +type AssetLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Asset, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewAssetLoader creates a new AssetLoader given a fetch, wait, and maxBatch +func NewAssetLoader(config AssetLoaderConfig) *AssetLoader { + return &AssetLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// AssetLoader batches and caches requests +type AssetLoader struct { + // this method provides the data for the loader + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Asset, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[gqlmodel.ID]*gqlmodel.Asset + + // the current batch. 
keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *assetLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type assetLoaderBatch struct { + keys []gqlmodel.ID + data []*gqlmodel.Asset + error []error + closing bool + done chan struct{} +} + +// Load a Asset by key, batching and caching will be applied automatically +func (l *AssetLoader) Load(key gqlmodel.ID) (*gqlmodel.Asset, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a Asset. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *AssetLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Asset, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.Asset, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &assetLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.Asset, error) { + <-batch.done + + var data *gqlmodel.Asset + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *AssetLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Asset, []error) { + results := make([]func() (*gqlmodel.Asset, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + assets := make([]*gqlmodel.Asset, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + assets[i], errors[i] = thunk() + } + return assets, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Assets. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *AssetLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Asset, []error) { + results := make([]func() (*gqlmodel.Asset, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.Asset, []error) { + assets := make([]*gqlmodel.Asset, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + assets[i], errors[i] = thunk() + } + return assets, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *AssetLoader) Prime(key gqlmodel.ID, value *gqlmodel.Asset) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *AssetLoader) Clear(key gqlmodel.ID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *AssetLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Asset) { + if l.cache == nil { + l.cache = map[gqlmodel.ID]*gqlmodel.Asset{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *assetLoaderBatch) keyIndex(l *AssetLoader, key gqlmodel.ID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *assetLoaderBatch) startTimer(l *AssetLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *assetLoaderBatch) end(l *AssetLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/server/internal/adapter/gql/gqldataloader/dataloader.go b/server/internal/adapter/gql/gqldataloader/dataloader.go new file mode 100644 index 000000000..19ac7e7c8 --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/dataloader.go @@ -0,0 +1,18 @@ +package gqldataloader + +//go:generate go run github.com/vektah/dataloaden AssetLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Asset +//go:generate go run github.com/vektah/dataloaden DatasetLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID 
*github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Dataset +//go:generate go run github.com/vektah/dataloaden DatasetSchemaLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.DatasetSchema +//go:generate go run github.com/vektah/dataloaden LayerLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Layer +//go:generate go run github.com/vektah/dataloaden LayerGroupLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.LayerGroup +//go:generate go run github.com/vektah/dataloaden LayerItemLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.LayerItem +//go:generate go run github.com/vektah/dataloaden PluginLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Plugin +//go:generate go run github.com/vektah/dataloaden ProjectLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Project +//go:generate go run github.com/vektah/dataloaden PropertyLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Property +//go:generate go run github.com/vektah/dataloaden PropertySchemaLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PropertySchema +//go:generate go run github.com/vektah/dataloaden SceneLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Scene +//go:generate go run 
github.com/vektah/dataloaden TeamLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Team +//go:generate go run github.com/vektah/dataloaden UserLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.User +//go:generate go run github.com/vektah/dataloaden TagLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Tag +//go:generate go run github.com/vektah/dataloaden TagItemLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.TagItem +//go:generate go run github.com/vektah/dataloaden TagGroupLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.TagGroup diff --git a/server/internal/adapter/gql/gqldataloader/datasetloader_gen.go b/server/internal/adapter/gql/gqldataloader/datasetloader_gen.go new file mode 100644 index 000000000..57b115ab7 --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/datasetloader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +// DatasetLoaderConfig captures the config to create a new DatasetLoader +type DatasetLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Dataset, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewDatasetLoader creates a new DatasetLoader given a fetch, wait, and maxBatch +func NewDatasetLoader(config DatasetLoaderConfig) *DatasetLoader { + return &DatasetLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// DatasetLoader batches and caches requests +type DatasetLoader struct { + // this method provides the data for the loader + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Dataset, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[gqlmodel.ID]*gqlmodel.Dataset + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *datasetLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type datasetLoaderBatch struct { + keys []gqlmodel.ID + data []*gqlmodel.Dataset + error []error + closing bool + done chan struct{} +} + +// Load a Dataset by key, batching and caching will be applied automatically +func (l *DatasetLoader) Load(key gqlmodel.ID) (*gqlmodel.Dataset, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a Dataset. 
+// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *DatasetLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Dataset, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.Dataset, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &datasetLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.Dataset, error) { + <-batch.done + + var data *gqlmodel.Dataset + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *DatasetLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Dataset, []error) { + results := make([]func() (*gqlmodel.Dataset, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + datasets := make([]*gqlmodel.Dataset, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + datasets[i], errors[i] = thunk() + } + return datasets, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Datasets. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *DatasetLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Dataset, []error) { + results := make([]func() (*gqlmodel.Dataset, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.Dataset, []error) { + datasets := make([]*gqlmodel.Dataset, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + datasets[i], errors[i] = thunk() + } + return datasets, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *DatasetLoader) Prime(key gqlmodel.ID, value *gqlmodel.Dataset) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *DatasetLoader) Clear(key gqlmodel.ID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *DatasetLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Dataset) { + if l.cache == nil { + l.cache = map[gqlmodel.ID]*gqlmodel.Dataset{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *datasetLoaderBatch) keyIndex(l *DatasetLoader, key gqlmodel.ID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *datasetLoaderBatch) startTimer(l *DatasetLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *datasetLoaderBatch) end(l *DatasetLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/server/internal/adapter/gql/gqldataloader/datasetschemaloader_gen.go b/server/internal/adapter/gql/gqldataloader/datasetschemaloader_gen.go new file mode 100644 index 000000000..9fe7e8ffa --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/datasetschemaloader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +// DatasetSchemaLoaderConfig captures the config to create a new DatasetSchemaLoader +type DatasetSchemaLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewDatasetSchemaLoader creates a new DatasetSchemaLoader given a fetch, wait, and maxBatch +func NewDatasetSchemaLoader(config DatasetSchemaLoaderConfig) *DatasetSchemaLoader { + return &DatasetSchemaLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// DatasetSchemaLoader batches and caches requests +type DatasetSchemaLoader struct { + // this method provides the data for the loader + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[gqlmodel.ID]*gqlmodel.DatasetSchema + + // the current batch. 
keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *datasetSchemaLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type datasetSchemaLoaderBatch struct { + keys []gqlmodel.ID + data []*gqlmodel.DatasetSchema + error []error + closing bool + done chan struct{} +} + +// Load a DatasetSchema by key, batching and caching will be applied automatically +func (l *DatasetSchemaLoader) Load(key gqlmodel.ID) (*gqlmodel.DatasetSchema, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a DatasetSchema. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *DatasetSchemaLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.DatasetSchema, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.DatasetSchema, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &datasetSchemaLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.DatasetSchema, error) { + <-batch.done + + var data *gqlmodel.DatasetSchema + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *DatasetSchemaLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) { + results := make([]func() (*gqlmodel.DatasetSchema, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + datasetSchemas := make([]*gqlmodel.DatasetSchema, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + datasetSchemas[i], errors[i] = thunk() + } + return datasetSchemas, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a DatasetSchemas. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *DatasetSchemaLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.DatasetSchema, []error) { + results := make([]func() (*gqlmodel.DatasetSchema, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.DatasetSchema, []error) { + datasetSchemas := make([]*gqlmodel.DatasetSchema, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + datasetSchemas[i], errors[i] = thunk() + } + return datasetSchemas, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *DatasetSchemaLoader) Prime(key gqlmodel.ID, value *gqlmodel.DatasetSchema) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *DatasetSchemaLoader) Clear(key gqlmodel.ID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *DatasetSchemaLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.DatasetSchema) { + if l.cache == nil { + l.cache = map[gqlmodel.ID]*gqlmodel.DatasetSchema{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *datasetSchemaLoaderBatch) keyIndex(l *DatasetSchemaLoader, key gqlmodel.ID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *datasetSchemaLoaderBatch) startTimer(l *DatasetSchemaLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *datasetSchemaLoaderBatch) end(l *DatasetSchemaLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/server/internal/adapter/gql/gqldataloader/layergrouploader_gen.go b/server/internal/adapter/gql/gqldataloader/layergrouploader_gen.go new file mode 100644 index 000000000..0e46cf893 --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/layergrouploader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +// LayerGroupLoaderConfig captures the config to create a new LayerGroupLoader +type LayerGroupLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewLayerGroupLoader creates a new LayerGroupLoader given a fetch, wait, and maxBatch +func NewLayerGroupLoader(config LayerGroupLoaderConfig) *LayerGroupLoader { + return &LayerGroupLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// LayerGroupLoader batches and caches requests +type LayerGroupLoader struct { + // this method provides the data for the loader + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[gqlmodel.ID]*gqlmodel.LayerGroup + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *layerGroupLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type layerGroupLoaderBatch struct { + keys []gqlmodel.ID + data []*gqlmodel.LayerGroup + error []error + closing bool + done chan struct{} +} + +// Load a LayerGroup by key, batching and caching will be applied automatically +func (l *LayerGroupLoader) Load(key gqlmodel.ID) (*gqlmodel.LayerGroup, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a LayerGroup. 
+// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *LayerGroupLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.LayerGroup, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.LayerGroup, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &layerGroupLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.LayerGroup, error) { + <-batch.done + + var data *gqlmodel.LayerGroup + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *LayerGroupLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) { + results := make([]func() (*gqlmodel.LayerGroup, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + layerGroups := make([]*gqlmodel.LayerGroup, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + layerGroups[i], errors[i] = thunk() + } + return layerGroups, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a LayerGroups. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *LayerGroupLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.LayerGroup, []error) { + results := make([]func() (*gqlmodel.LayerGroup, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.LayerGroup, []error) { + layerGroups := make([]*gqlmodel.LayerGroup, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + layerGroups[i], errors[i] = thunk() + } + return layerGroups, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *LayerGroupLoader) Prime(key gqlmodel.ID, value *gqlmodel.LayerGroup) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *LayerGroupLoader) Clear(key gqlmodel.ID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *LayerGroupLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.LayerGroup) { + if l.cache == nil { + l.cache = map[gqlmodel.ID]*gqlmodel.LayerGroup{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *layerGroupLoaderBatch) keyIndex(l *LayerGroupLoader, key gqlmodel.ID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *layerGroupLoaderBatch) startTimer(l *LayerGroupLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *layerGroupLoaderBatch) end(l *LayerGroupLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/server/internal/adapter/gql/gqldataloader/layeritemloader_gen.go b/server/internal/adapter/gql/gqldataloader/layeritemloader_gen.go new file mode 100644 index 000000000..97562fba2 --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/layeritemloader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +// LayerItemLoaderConfig captures the config to create a new LayerItemLoader +type LayerItemLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewLayerItemLoader creates a new LayerItemLoader given a fetch, wait, and maxBatch +func NewLayerItemLoader(config LayerItemLoaderConfig) *LayerItemLoader { + return &LayerItemLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// LayerItemLoader batches and caches requests +type LayerItemLoader struct { + // this method provides the data for the loader + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[gqlmodel.ID]*gqlmodel.LayerItem + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *layerItemLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type layerItemLoaderBatch struct { + keys []gqlmodel.ID + data []*gqlmodel.LayerItem + error []error + closing bool + done chan struct{} +} + +// Load a LayerItem by key, batching and caching will be applied automatically +func (l *LayerItemLoader) Load(key gqlmodel.ID) (*gqlmodel.LayerItem, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a LayerItem. 
+// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *LayerItemLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.LayerItem, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.LayerItem, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &layerItemLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.LayerItem, error) { + <-batch.done + + var data *gqlmodel.LayerItem + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *LayerItemLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) { + results := make([]func() (*gqlmodel.LayerItem, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + layerItems := make([]*gqlmodel.LayerItem, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + layerItems[i], errors[i] = thunk() + } + return layerItems, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a LayerItems. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *LayerItemLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.LayerItem, []error) { + results := make([]func() (*gqlmodel.LayerItem, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.LayerItem, []error) { + layerItems := make([]*gqlmodel.LayerItem, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + layerItems[i], errors[i] = thunk() + } + return layerItems, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *LayerItemLoader) Prime(key gqlmodel.ID, value *gqlmodel.LayerItem) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *LayerItemLoader) Clear(key gqlmodel.ID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *LayerItemLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.LayerItem) { + if l.cache == nil { + l.cache = map[gqlmodel.ID]*gqlmodel.LayerItem{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *layerItemLoaderBatch) keyIndex(l *LayerItemLoader, key gqlmodel.ID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *layerItemLoaderBatch) startTimer(l *LayerItemLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *layerItemLoaderBatch) end(l *LayerItemLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/server/internal/adapter/gql/gqldataloader/layerloader_gen.go b/server/internal/adapter/gql/gqldataloader/layerloader_gen.go new file mode 100644 index 000000000..f1cc7d4bd --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/layerloader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +// LayerLoaderConfig captures the config to create a new LayerLoader +type LayerLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Layer, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewLayerLoader creates a new LayerLoader given a fetch, wait, and maxBatch +func NewLayerLoader(config LayerLoaderConfig) *LayerLoader { + return &LayerLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// LayerLoader batches and caches requests +type LayerLoader struct { + // this method provides the data for the loader + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Layer, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[gqlmodel.ID]*gqlmodel.Layer + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *layerLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type layerLoaderBatch struct { + keys []gqlmodel.ID + data []*gqlmodel.Layer + error []error + closing bool + done chan struct{} +} + +// Load a Layer by key, batching and caching will be applied automatically +func (l *LayerLoader) Load(key gqlmodel.ID) (*gqlmodel.Layer, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a Layer. 
+// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *LayerLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Layer, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.Layer, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &layerLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.Layer, error) { + <-batch.done + + var data *gqlmodel.Layer + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *LayerLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Layer, []error) { + results := make([]func() (*gqlmodel.Layer, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + layers := make([]*gqlmodel.Layer, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + layers[i], errors[i] = thunk() + } + return layers, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Layers. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *LayerLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Layer, []error) { + results := make([]func() (*gqlmodel.Layer, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.Layer, []error) { + layers := make([]*gqlmodel.Layer, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + layers[i], errors[i] = thunk() + } + return layers, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *LayerLoader) Prime(key gqlmodel.ID, value *gqlmodel.Layer) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. + cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *LayerLoader) Clear(key gqlmodel.ID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *LayerLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Layer) { + if l.cache == nil { + l.cache = map[gqlmodel.ID]*gqlmodel.Layer{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *layerLoaderBatch) keyIndex(l *LayerLoader, key gqlmodel.ID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *layerLoaderBatch) startTimer(l *LayerLoader) { + time.Sleep(l.wait) + 
l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *layerLoaderBatch) end(l *LayerLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/server/internal/adapter/gql/gqldataloader/pluginloader_gen.go b/server/internal/adapter/gql/gqldataloader/pluginloader_gen.go new file mode 100644 index 000000000..e8b53ba23 --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/pluginloader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. + +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +// PluginLoaderConfig captures the config to create a new PluginLoader +type PluginLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Plugin, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewPluginLoader creates a new PluginLoader given a fetch, wait, and maxBatch +func NewPluginLoader(config PluginLoaderConfig) *PluginLoader { + return &PluginLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// PluginLoader batches and caches requests +type PluginLoader struct { + // this method provides the data for the loader + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Plugin, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[gqlmodel.ID]*gqlmodel.Plugin + + // the current batch. 
keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *pluginLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type pluginLoaderBatch struct { + keys []gqlmodel.ID + data []*gqlmodel.Plugin + error []error + closing bool + done chan struct{} +} + +// Load a Plugin by key, batching and caching will be applied automatically +func (l *PluginLoader) Load(key gqlmodel.ID) (*gqlmodel.Plugin, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a Plugin. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *PluginLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Plugin, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.Plugin, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &pluginLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.Plugin, error) { + <-batch.done + + var data *gqlmodel.Plugin + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *PluginLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Plugin, []error) { + results := make([]func() (*gqlmodel.Plugin, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + plugins := make([]*gqlmodel.Plugin, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + plugins[i], errors[i] = thunk() + } + return plugins, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Plugins. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *PluginLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Plugin, []error) { + results := make([]func() (*gqlmodel.Plugin, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.Plugin, []error) { + plugins := make([]*gqlmodel.Plugin, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + plugins[i], errors[i] = thunk() + } + return plugins, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *PluginLoader) Prime(key gqlmodel.ID, value *gqlmodel.Plugin) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *PluginLoader) Clear(key gqlmodel.ID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *PluginLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Plugin) { + if l.cache == nil { + l.cache = map[gqlmodel.ID]*gqlmodel.Plugin{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *pluginLoaderBatch) keyIndex(l *PluginLoader, key gqlmodel.ID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *pluginLoaderBatch) startTimer(l *PluginLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *pluginLoaderBatch) end(l *PluginLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/server/internal/adapter/gql/gqldataloader/projectloader_gen.go b/server/internal/adapter/gql/gqldataloader/projectloader_gen.go new file mode 100644 index 000000000..31540c646 --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/projectloader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +// ProjectLoaderConfig captures the config to create a new ProjectLoader +type ProjectLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Project, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewProjectLoader creates a new ProjectLoader given a fetch, wait, and maxBatch +func NewProjectLoader(config ProjectLoaderConfig) *ProjectLoader { + return &ProjectLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// ProjectLoader batches and caches requests +type ProjectLoader struct { + // this method provides the data for the loader + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Project, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[gqlmodel.ID]*gqlmodel.Project + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *projectLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type projectLoaderBatch struct { + keys []gqlmodel.ID + data []*gqlmodel.Project + error []error + closing bool + done chan struct{} +} + +// Load a Project by key, batching and caching will be applied automatically +func (l *ProjectLoader) Load(key gqlmodel.ID) (*gqlmodel.Project, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a Project. 
+// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *ProjectLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Project, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.Project, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &projectLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.Project, error) { + <-batch.done + + var data *gqlmodel.Project + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *ProjectLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Project, []error) { + results := make([]func() (*gqlmodel.Project, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + projects := make([]*gqlmodel.Project, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + projects[i], errors[i] = thunk() + } + return projects, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Projects. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *ProjectLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Project, []error) { + results := make([]func() (*gqlmodel.Project, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.Project, []error) { + projects := make([]*gqlmodel.Project, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + projects[i], errors[i] = thunk() + } + return projects, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *ProjectLoader) Prime(key gqlmodel.ID, value *gqlmodel.Project) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *ProjectLoader) Clear(key gqlmodel.ID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *ProjectLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Project) { + if l.cache == nil { + l.cache = map[gqlmodel.ID]*gqlmodel.Project{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *projectLoaderBatch) keyIndex(l *ProjectLoader, key gqlmodel.ID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *projectLoaderBatch) startTimer(l *ProjectLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *projectLoaderBatch) end(l *ProjectLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/server/internal/adapter/gql/gqldataloader/propertyloader_gen.go b/server/internal/adapter/gql/gqldataloader/propertyloader_gen.go new file mode 100644 index 000000000..20a20430e --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/propertyloader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +// PropertyLoaderConfig captures the config to create a new PropertyLoader +type PropertyLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Property, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewPropertyLoader creates a new PropertyLoader given a fetch, wait, and maxBatch +func NewPropertyLoader(config PropertyLoaderConfig) *PropertyLoader { + return &PropertyLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// PropertyLoader batches and caches requests +type PropertyLoader struct { + // this method provides the data for the loader + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Property, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[gqlmodel.ID]*gqlmodel.Property + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *propertyLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type propertyLoaderBatch struct { + keys []gqlmodel.ID + data []*gqlmodel.Property + error []error + closing bool + done chan struct{} +} + +// Load a Property by key, batching and caching will be applied automatically +func (l *PropertyLoader) Load(key gqlmodel.ID) (*gqlmodel.Property, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a Property. 
+// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *PropertyLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Property, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.Property, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &propertyLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.Property, error) { + <-batch.done + + var data *gqlmodel.Property + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *PropertyLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Property, []error) { + results := make([]func() (*gqlmodel.Property, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + propertys := make([]*gqlmodel.Property, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + propertys[i], errors[i] = thunk() + } + return propertys, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Propertys. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *PropertyLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Property, []error) { + results := make([]func() (*gqlmodel.Property, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.Property, []error) { + propertys := make([]*gqlmodel.Property, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + propertys[i], errors[i] = thunk() + } + return propertys, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *PropertyLoader) Prime(key gqlmodel.ID, value *gqlmodel.Property) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *PropertyLoader) Clear(key gqlmodel.ID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *PropertyLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Property) { + if l.cache == nil { + l.cache = map[gqlmodel.ID]*gqlmodel.Property{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *propertyLoaderBatch) keyIndex(l *PropertyLoader, key gqlmodel.ID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *propertyLoaderBatch) startTimer(l *PropertyLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *propertyLoaderBatch) end(l *PropertyLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/server/internal/adapter/gql/gqldataloader/propertyschemaloader_gen.go b/server/internal/adapter/gql/gqldataloader/propertyschemaloader_gen.go new file mode 100644 index 000000000..253e408cf --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/propertyschemaloader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +// PropertySchemaLoaderConfig captures the config to create a new PropertySchemaLoader +type PropertySchemaLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewPropertySchemaLoader creates a new PropertySchemaLoader given a fetch, wait, and maxBatch +func NewPropertySchemaLoader(config PropertySchemaLoaderConfig) *PropertySchemaLoader { + return &PropertySchemaLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// PropertySchemaLoader batches and caches requests +type PropertySchemaLoader struct { + // this method provides the data for the loader + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[gqlmodel.ID]*gqlmodel.PropertySchema + + // the current batch. 
keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *propertySchemaLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type propertySchemaLoaderBatch struct { + keys []gqlmodel.ID + data []*gqlmodel.PropertySchema + error []error + closing bool + done chan struct{} +} + +// Load a PropertySchema by key, batching and caching will be applied automatically +func (l *PropertySchemaLoader) Load(key gqlmodel.ID) (*gqlmodel.PropertySchema, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a PropertySchema. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *PropertySchemaLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.PropertySchema, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.PropertySchema, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &propertySchemaLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.PropertySchema, error) { + <-batch.done + + var data *gqlmodel.PropertySchema + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *PropertySchemaLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) { + results := make([]func() (*gqlmodel.PropertySchema, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + propertySchemas := make([]*gqlmodel.PropertySchema, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + propertySchemas[i], errors[i] = thunk() + } + return propertySchemas, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a PropertySchemas. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *PropertySchemaLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.PropertySchema, []error) { + results := make([]func() (*gqlmodel.PropertySchema, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.PropertySchema, []error) { + propertySchemas := make([]*gqlmodel.PropertySchema, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + propertySchemas[i], errors[i] = thunk() + } + return propertySchemas, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *PropertySchemaLoader) Prime(key gqlmodel.ID, value *gqlmodel.PropertySchema) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *PropertySchemaLoader) Clear(key gqlmodel.ID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *PropertySchemaLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.PropertySchema) { + if l.cache == nil { + l.cache = map[gqlmodel.ID]*gqlmodel.PropertySchema{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *propertySchemaLoaderBatch) keyIndex(l *PropertySchemaLoader, key gqlmodel.ID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *propertySchemaLoaderBatch) startTimer(l *PropertySchemaLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *propertySchemaLoaderBatch) end(l *PropertySchemaLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/server/internal/adapter/gql/gqldataloader/sceneloader_gen.go b/server/internal/adapter/gql/gqldataloader/sceneloader_gen.go new file mode 100644 index 000000000..233b4cdde --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/sceneloader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +// SceneLoaderConfig captures the config to create a new SceneLoader +type SceneLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Scene, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewSceneLoader creates a new SceneLoader given a fetch, wait, and maxBatch +func NewSceneLoader(config SceneLoaderConfig) *SceneLoader { + return &SceneLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// SceneLoader batches and caches requests +type SceneLoader struct { + // this method provides the data for the loader + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Scene, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[gqlmodel.ID]*gqlmodel.Scene + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *sceneLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type sceneLoaderBatch struct { + keys []gqlmodel.ID + data []*gqlmodel.Scene + error []error + closing bool + done chan struct{} +} + +// Load a Scene by key, batching and caching will be applied automatically +func (l *SceneLoader) Load(key gqlmodel.ID) (*gqlmodel.Scene, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a Scene. 
+// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *SceneLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Scene, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.Scene, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &sceneLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.Scene, error) { + <-batch.done + + var data *gqlmodel.Scene + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *SceneLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Scene, []error) { + results := make([]func() (*gqlmodel.Scene, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + scenes := make([]*gqlmodel.Scene, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + scenes[i], errors[i] = thunk() + } + return scenes, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Scenes. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *SceneLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Scene, []error) { + results := make([]func() (*gqlmodel.Scene, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.Scene, []error) { + scenes := make([]*gqlmodel.Scene, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + scenes[i], errors[i] = thunk() + } + return scenes, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *SceneLoader) Prime(key gqlmodel.ID, value *gqlmodel.Scene) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. + cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *SceneLoader) Clear(key gqlmodel.ID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *SceneLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Scene) { + if l.cache == nil { + l.cache = map[gqlmodel.ID]*gqlmodel.Scene{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *sceneLoaderBatch) keyIndex(l *SceneLoader, key gqlmodel.ID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *sceneLoaderBatch) startTimer(l *SceneLoader) { + time.Sleep(l.wait) + 
l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *sceneLoaderBatch) end(l *SceneLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/server/internal/adapter/gql/gqldataloader/taggrouploader_gen.go b/server/internal/adapter/gql/gqldataloader/taggrouploader_gen.go new file mode 100644 index 000000000..d50e6e693 --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/taggrouploader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. + +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +// TagGroupLoaderConfig captures the config to create a new TagGroupLoader +type TagGroupLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.TagGroup, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewTagGroupLoader creates a new TagGroupLoader given a fetch, wait, and maxBatch +func NewTagGroupLoader(config TagGroupLoaderConfig) *TagGroupLoader { + return &TagGroupLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// TagGroupLoader batches and caches requests +type TagGroupLoader struct { + // this method provides the data for the loader + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.TagGroup, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[gqlmodel.ID]*gqlmodel.TagGroup + + // the current batch. 
keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *tagGroupLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type tagGroupLoaderBatch struct { + keys []gqlmodel.ID + data []*gqlmodel.TagGroup + error []error + closing bool + done chan struct{} +} + +// Load a TagGroup by key, batching and caching will be applied automatically +func (l *TagGroupLoader) Load(key gqlmodel.ID) (*gqlmodel.TagGroup, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a TagGroup. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *TagGroupLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.TagGroup, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.TagGroup, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &tagGroupLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.TagGroup, error) { + <-batch.done + + var data *gqlmodel.TagGroup + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *TagGroupLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.TagGroup, []error) { + results := make([]func() (*gqlmodel.TagGroup, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + tagGroups := make([]*gqlmodel.TagGroup, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + tagGroups[i], errors[i] = thunk() + } + return tagGroups, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a TagGroups. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *TagGroupLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.TagGroup, []error) { + results := make([]func() (*gqlmodel.TagGroup, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.TagGroup, []error) { + tagGroups := make([]*gqlmodel.TagGroup, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + tagGroups[i], errors[i] = thunk() + } + return tagGroups, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *TagGroupLoader) Prime(key gqlmodel.ID, value *gqlmodel.TagGroup) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *TagGroupLoader) Clear(key gqlmodel.ID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *TagGroupLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.TagGroup) { + if l.cache == nil { + l.cache = map[gqlmodel.ID]*gqlmodel.TagGroup{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *tagGroupLoaderBatch) keyIndex(l *TagGroupLoader, key gqlmodel.ID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *tagGroupLoaderBatch) startTimer(l *TagGroupLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *tagGroupLoaderBatch) end(l *TagGroupLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/server/internal/adapter/gql/gqldataloader/tagitemloader_gen.go b/server/internal/adapter/gql/gqldataloader/tagitemloader_gen.go new file mode 100644 index 000000000..90ad04a4c --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/tagitemloader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +// TagItemLoaderConfig captures the config to create a new TagItemLoader +type TagItemLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.TagItem, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewTagItemLoader creates a new TagItemLoader given a fetch, wait, and maxBatch +func NewTagItemLoader(config TagItemLoaderConfig) *TagItemLoader { + return &TagItemLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// TagItemLoader batches and caches requests +type TagItemLoader struct { + // this method provides the data for the loader + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.TagItem, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[gqlmodel.ID]*gqlmodel.TagItem + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *tagItemLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type tagItemLoaderBatch struct { + keys []gqlmodel.ID + data []*gqlmodel.TagItem + error []error + closing bool + done chan struct{} +} + +// Load a TagItem by key, batching and caching will be applied automatically +func (l *TagItemLoader) Load(key gqlmodel.ID) (*gqlmodel.TagItem, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a TagItem. 
+// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *TagItemLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.TagItem, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.TagItem, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &tagItemLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.TagItem, error) { + <-batch.done + + var data *gqlmodel.TagItem + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *TagItemLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.TagItem, []error) { + results := make([]func() (*gqlmodel.TagItem, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + tagItems := make([]*gqlmodel.TagItem, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + tagItems[i], errors[i] = thunk() + } + return tagItems, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a TagItems. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *TagItemLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.TagItem, []error) { + results := make([]func() (*gqlmodel.TagItem, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.TagItem, []error) { + tagItems := make([]*gqlmodel.TagItem, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + tagItems[i], errors[i] = thunk() + } + return tagItems, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *TagItemLoader) Prime(key gqlmodel.ID, value *gqlmodel.TagItem) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *TagItemLoader) Clear(key gqlmodel.ID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *TagItemLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.TagItem) { + if l.cache == nil { + l.cache = map[gqlmodel.ID]*gqlmodel.TagItem{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *tagItemLoaderBatch) keyIndex(l *TagItemLoader, key gqlmodel.ID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *tagItemLoaderBatch) startTimer(l *TagItemLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *tagItemLoaderBatch) end(l *TagItemLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/server/internal/adapter/gql/gqldataloader/tagloader_gen.go b/server/internal/adapter/gql/gqldataloader/tagloader_gen.go new file mode 100644 index 000000000..909c22983 --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/tagloader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +// TagLoaderConfig captures the config to create a new TagLoader +type TagLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Tag, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewTagLoader creates a new TagLoader given a fetch, wait, and maxBatch +func NewTagLoader(config TagLoaderConfig) *TagLoader { + return &TagLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// TagLoader batches and caches requests +type TagLoader struct { + // this method provides the data for the loader + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Tag, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[gqlmodel.ID]*gqlmodel.Tag + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *tagLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type tagLoaderBatch struct { + keys []gqlmodel.ID + data []*gqlmodel.Tag + error []error + closing bool + done chan struct{} +} + +// Load a Tag by key, batching and caching will be applied automatically +func (l *TagLoader) Load(key gqlmodel.ID) (*gqlmodel.Tag, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a Tag. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *TagLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Tag, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.Tag, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &tagLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.Tag, error) { + <-batch.done + + var data *gqlmodel.Tag + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *TagLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Tag, []error) { + results := make([]func() (*gqlmodel.Tag, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + tags := make([]*gqlmodel.Tag, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + tags[i], errors[i] = thunk() + } + return tags, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Tags. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *TagLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Tag, []error) { + results := make([]func() (*gqlmodel.Tag, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.Tag, []error) { + tags := make([]*gqlmodel.Tag, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + tags[i], errors[i] = thunk() + } + return tags, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *TagLoader) Prime(key gqlmodel.ID, value *gqlmodel.Tag) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. + cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *TagLoader) Clear(key gqlmodel.ID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *TagLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Tag) { + if l.cache == nil { + l.cache = map[gqlmodel.ID]*gqlmodel.Tag{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *tagLoaderBatch) keyIndex(l *TagLoader, key gqlmodel.ID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *tagLoaderBatch) startTimer(l *TagLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a 
batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *tagLoaderBatch) end(l *TagLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/server/internal/adapter/gql/gqldataloader/teamloader_gen.go b/server/internal/adapter/gql/gqldataloader/teamloader_gen.go new file mode 100644 index 000000000..9c5fbb903 --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/teamloader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. + +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +// TeamLoaderConfig captures the config to create a new TeamLoader +type TeamLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Team, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewTeamLoader creates a new TeamLoader given a fetch, wait, and maxBatch +func NewTeamLoader(config TeamLoaderConfig) *TeamLoader { + return &TeamLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// TeamLoader batches and caches requests +type TeamLoader struct { + // this method provides the data for the loader + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Team, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[gqlmodel.ID]*gqlmodel.Team + + // the current batch. 
keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *teamLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type teamLoaderBatch struct { + keys []gqlmodel.ID + data []*gqlmodel.Team + error []error + closing bool + done chan struct{} +} + +// Load a Team by key, batching and caching will be applied automatically +func (l *TeamLoader) Load(key gqlmodel.ID) (*gqlmodel.Team, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a Team. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *TeamLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Team, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.Team, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &teamLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.Team, error) { + <-batch.done + + var data *gqlmodel.Team + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *TeamLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Team, []error) { + results := make([]func() (*gqlmodel.Team, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + teams := make([]*gqlmodel.Team, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + teams[i], errors[i] = thunk() + } + return teams, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Teams. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *TeamLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Team, []error) { + results := make([]func() (*gqlmodel.Team, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.Team, []error) { + teams := make([]*gqlmodel.Team, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + teams[i], errors[i] = thunk() + } + return teams, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *TeamLoader) Prime(key gqlmodel.ID, value *gqlmodel.Team) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *TeamLoader) Clear(key gqlmodel.ID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *TeamLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Team) { + if l.cache == nil { + l.cache = map[gqlmodel.ID]*gqlmodel.Team{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *teamLoaderBatch) keyIndex(l *TeamLoader, key gqlmodel.ID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *teamLoaderBatch) startTimer(l *TeamLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *teamLoaderBatch) end(l *TeamLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/server/internal/adapter/gql/gqldataloader/userloader_gen.go b/server/internal/adapter/gql/gqldataloader/userloader_gen.go new file mode 100644 index 000000000..85bd08cde --- /dev/null +++ b/server/internal/adapter/gql/gqldataloader/userloader_gen.go @@ -0,0 +1,224 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +// UserLoaderConfig captures the config to create a new UserLoader +type UserLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.User, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewUserLoader creates a new UserLoader given a fetch, wait, and maxBatch +func NewUserLoader(config UserLoaderConfig) *UserLoader { + return &UserLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// UserLoader batches and caches requests +type UserLoader struct { + // this method provides the data for the loader + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.User, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[gqlmodel.ID]*gqlmodel.User + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *userLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type userLoaderBatch struct { + keys []gqlmodel.ID + data []*gqlmodel.User + error []error + closing bool + done chan struct{} +} + +// Load a User by key, batching and caching will be applied automatically +func (l *UserLoader) Load(key gqlmodel.ID) (*gqlmodel.User, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a User. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *UserLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.User, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.User, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &userLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.User, error) { + <-batch.done + + var data *gqlmodel.User + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *UserLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.User, []error) { + results := make([]func() (*gqlmodel.User, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + users := make([]*gqlmodel.User, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + users[i], errors[i] = thunk() + } + return users, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Users. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *UserLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.User, []error) { + results := make([]func() (*gqlmodel.User, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.User, []error) { + users := make([]*gqlmodel.User, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + users[i], errors[i] = thunk() + } + return users, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *UserLoader) Prime(key gqlmodel.ID, value *gqlmodel.User) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. + cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *UserLoader) Clear(key gqlmodel.ID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *UserLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.User) { + if l.cache == nil { + l.cache = map[gqlmodel.ID]*gqlmodel.User{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *userLoaderBatch) keyIndex(l *UserLoader, key gqlmodel.ID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *userLoaderBatch) startTimer(l *UserLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // 
we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *userLoaderBatch) end(l *UserLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/server/internal/adapter/gql/gqlmodel/convert.go b/server/internal/adapter/gql/gqlmodel/convert.go new file mode 100644 index 000000000..eecaa6363 --- /dev/null +++ b/server/internal/adapter/gql/gqlmodel/convert.go @@ -0,0 +1,99 @@ +package gqlmodel + +import ( + "io" + + "github.com/99designs/gqlgen/graphql" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +func RefToIndex(i *int) int { + if i == nil { + return -1 + } + return *i +} + +func RefToString(s *string) string { + if s == nil { + return "" + } + return *s +} + +func stringToRef(s string) *string { + if s == "" { + return nil + } + return &s +} + +func BoolToRef(b bool) *bool { + return &b +} + +func ToPageInfo(p *usecase.PageInfo) *PageInfo { + if p == nil { + return &PageInfo{} + } + return &PageInfo{ + StartCursor: p.StartCursor(), + EndCursor: p.EndCursor(), + HasNextPage: p.HasNextPage(), + HasPreviousPage: p.HasPreviousPage(), + } +} + +func ToVisualizer(v visualizer.Visualizer) Visualizer { + switch v { + case visualizer.VisualizerCesium: + return VisualizerCesium + } + return Visualizer("") +} + +func ToVisualizerRef(v visualizer.Visualizer) *Visualizer { + if v == "" { + return nil + } + v2 := ToVisualizer(v) + return &v2 +} + +func FromFile(f *graphql.Upload) *file.File { + if f == nil { + return nil + } + return &file.File{ + Content: io.NopCloser(f.File), + Path: f.Filename, + Size: f.Size, + ContentType: f.ContentType, + } +} + +func FromListOperation(op ListOperation) interfaces.ListOperation { + if op == ListOperationAdd { + return 
interfaces.ListOperationAdd + } else if op == ListOperationMove { + return interfaces.ListOperationMove + } else if op == ListOperationRemove { + return interfaces.ListOperationRemove + } + return interfaces.ListOperation("") +} + +func ToPagination(pagination *Pagination) *usecase.Pagination { + if pagination == nil { + return nil + } + return &usecase.Pagination{ + Before: pagination.Before, + After: pagination.After, + First: pagination.First, + Last: pagination.Last, + } +} diff --git a/server/internal/adapter/gql/gqlmodel/convert_asset.go b/server/internal/adapter/gql/gqlmodel/convert_asset.go new file mode 100644 index 000000000..675eb8a19 --- /dev/null +++ b/server/internal/adapter/gql/gqlmodel/convert_asset.go @@ -0,0 +1,37 @@ +package gqlmodel + +import ( + "github.com/reearth/reearth-backend/pkg/asset" +) + +func ToAsset(a *asset.Asset) *Asset { + if a == nil { + return nil + } + + return &Asset{ + ID: IDFrom(a.ID()), + CreatedAt: a.CreatedAt(), + TeamID: IDFrom(a.Team()), + Name: a.Name(), + Size: a.Size(), + URL: a.URL(), + ContentType: a.ContentType(), + } +} + +func AssetSortTypeFrom(ast *AssetSortType) *asset.SortType { + if ast == nil { + return nil + } + + switch *ast { + case AssetSortTypeDate: + return &asset.SortTypeID + case AssetSortTypeName: + return &asset.SortTypeName + case AssetSortTypeSize: + return &asset.SortTypeSize + } + return &asset.SortTypeID +} diff --git a/server/internal/adapter/gql/gqlmodel/convert_dataset.go b/server/internal/adapter/gql/gqlmodel/convert_dataset.go new file mode 100644 index 000000000..29f020a1d --- /dev/null +++ b/server/internal/adapter/gql/gqlmodel/convert_dataset.go @@ -0,0 +1,65 @@ +package gqlmodel + +import ( + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/util" + "github.com/reearth/reearth-backend/pkg/value" +) + +func ToDatasetValue(v *dataset.Value) *interface{} { + i := valueInterfaceToGqlValue(v.Value()) + return &i +} + +func ToDatasetField(f 
*dataset.Field, parent *dataset.Dataset) *DatasetField { + if f == nil || parent == nil { + return nil + } + + return &DatasetField{ + SchemaID: IDFrom(parent.Schema()), + FieldID: IDFrom(f.Field()), + Type: ToValueType(value.Type(f.Type())), + Value: ToDatasetValue(f.Value()), + Source: f.Source(), + } +} + +func ToDataset(ds *dataset.Dataset) *Dataset { + if ds == nil { + return nil + } + + return &Dataset{ + ID: IDFrom(ds.ID()), + SchemaID: IDFrom(ds.Schema()), + Source: ds.Source(), + Fields: util.FilterMapR(ds.Fields(), func(f *dataset.Field) *DatasetField { + return ToDatasetField(f, ds) + }), + } +} + +func ToDatasetSchema(ds *dataset.Schema) *DatasetSchema { + if ds == nil { + return nil + } + + return &DatasetSchema{ + ID: IDFrom(ds.ID()), + Source: ds.Source(), + Name: ds.Name(), + SceneID: IDFrom(ds.Scene()), + RepresentativeFieldID: IDFromRef(ds.RepresentativeField().IDRef()), + Fields: util.Map(ds.Fields(), func(f *dataset.SchemaField) *DatasetSchemaField { + return &DatasetSchemaField{ + ID: IDFrom(f.ID()), + Name: f.Name(), + Type: ToValueType(value.Type(f.Type())), + SchemaID: IDFrom(ds.ID()), + Source: f.Source(), + RefID: IDFromRef(f.Ref()), + } + }), + } +} diff --git a/server/internal/adapter/gql/gqlmodel/convert_layer.go b/server/internal/adapter/gql/gqlmodel/convert_layer.go new file mode 100644 index 000000000..00fca5667 --- /dev/null +++ b/server/internal/adapter/gql/gqlmodel/convert_layer.go @@ -0,0 +1,208 @@ +package gqlmodel + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/decoding" + "github.com/reearth/reearth-backend/pkg/util" +) + +func ToLayerItem(l *layer.Item, parent *id.LayerID) *LayerItem { + if l == nil { + return nil + } + + return &LayerItem{ + ID: IDFrom(l.ID()), + SceneID: IDFrom(l.Scene()), + Name: l.Name(), + IsVisible: l.IsVisible(), + PropertyID: IDFromRef(l.Property()), + PluginID: IDFromPluginIDRef(l.Plugin()), + 
ExtensionID: IDFromStringRef(l.Extension()), + Infobox: ToInfobox(l.Infobox(), l.ID(), l.Scene(), l.LinkedDataset()), + LinkedDatasetID: IDFromRef(l.LinkedDataset()), + ParentID: IDFromRef[id.Layer](parent), + Tags: ToLayerTagList(l.Tags(), l.Scene()), + } +} + +func ToLayerGroup(l *layer.Group, parent *id.LayerID) *LayerGroup { + if l == nil { + return nil + } + + return &LayerGroup{ + ID: IDFrom(l.ID()), + SceneID: IDFrom(l.Scene()), + Name: l.Name(), + IsVisible: l.IsVisible(), + PropertyID: IDFromRef(l.Property()), + PluginID: IDFromPluginIDRef(l.Plugin()), + ExtensionID: IDFromStringRef(l.Extension()), + Infobox: ToInfobox(l.Infobox(), l.ID(), l.Scene(), nil), + LinkedDatasetSchemaID: IDFromRef(l.LinkedDatasetSchema()), + LayerIds: util.Map(l.Layers().Layers(), IDFrom[id.Layer]), + Root: l.IsRoot(), + ParentID: IDFromRef[id.Layer](parent), + Tags: ToLayerTagList(l.Tags(), l.Scene()), + } +} + +func ToLayer(l layer.Layer, parent *id.LayerID) Layer { + if l == nil { + return nil + } + + switch la := l.(type) { + case *layer.Item: + return ToLayerItem(la, parent) + case *layer.Group: + return ToLayerGroup(la, parent) + } + return nil +} + +func ToLayers(layers layer.List, parent *id.LayerID) []Layer { + return util.Map(layers, func(l *layer.Layer) Layer { + return ToLayer(*l, parent) + }) +} + +func ToInfoboxField(ibf *layer.InfoboxField, parentSceneID id.SceneID, parentDatasetID *id.DatasetID) *InfoboxField { + if ibf == nil { + return nil + } + + return &InfoboxField{ + ID: IDFrom(ibf.ID()), + SceneID: IDFrom(parentSceneID), + PluginID: IDFromPluginID(ibf.Plugin()), + ExtensionID: ID(ibf.Extension()), + PropertyID: IDFrom(ibf.Property()), + LinkedDatasetID: IDFromRef[id.Dataset](parentDatasetID), + } +} + +func ToInfobox(ib *layer.Infobox, parent id.LayerID, parentSceneID id.SceneID, parentDatasetID *id.DatasetID) *Infobox { + if ib == nil { + return nil + } + ibFields := ib.Fields() + fields := make([]*InfoboxField, 0, len(ibFields)) + for _, ibf := range 
ibFields { + fields = append(fields, ToInfoboxField(ibf, parentSceneID, parentDatasetID)) + } + + return &Infobox{ + SceneID: IDFrom(parentSceneID), + PropertyID: IDFrom(ib.Property()), + Fields: fields, + LayerID: IDFrom(parent), + LinkedDatasetID: IDFromRef[id.Dataset](parentDatasetID), + } +} + +func ToMergedLayer(layer *layer.Merged) *MergedLayer { + if layer == nil { + return nil + } + + return &MergedLayer{ + SceneID: IDFrom(layer.Scene), + OriginalID: IDFrom(layer.Original), + ParentID: IDFromRef(layer.Parent), + Infobox: ToMergedInfobox(layer.Infobox, layer.Scene), + Property: ToMergedPropertyFromMetadata(layer.Property), + } +} + +func ToMergedInfobox(ib *layer.MergedInfobox, sceneID id.SceneID) *MergedInfobox { + if ib == nil { + return nil + } + + return &MergedInfobox{ + SceneID: IDFrom(sceneID), + Fields: util.Map(ib.Fields, func(f *layer.MergedInfoboxField) *MergedInfoboxField { + return ToMergedInfoboxField(f, sceneID) + }), + Property: ToMergedPropertyFromMetadata(ib.Property), + } +} + +func ToMergedInfoboxField(ibf *layer.MergedInfoboxField, sceneID id.SceneID) *MergedInfoboxField { + if ibf == nil { + return nil + } + + return &MergedInfoboxField{ + SceneID: IDFrom(sceneID), + OriginalID: IDFrom(ibf.ID), + PluginID: IDFromPluginID(ibf.Plugin), + ExtensionID: ID(ibf.Extension), + Property: ToMergedPropertyFromMetadata(ibf.Property), + } +} +func FromLayerEncodingFormat(v LayerEncodingFormat) decoding.LayerEncodingFormat { + switch v { + case LayerEncodingFormatKml: + return decoding.LayerEncodingFormatKML + case LayerEncodingFormatCzml: + return decoding.LayerEncodingFormatCZML + case LayerEncodingFormatGeojson: + return decoding.LayerEncodingFormatGEOJSON + case LayerEncodingFormatShape: + return decoding.LayerEncodingFormatSHAPE + case LayerEncodingFormatReearth: + return decoding.LayerEncodingFormatREEARTH + } + + return decoding.LayerEncodingFormat("") +} + +func ToLayerTagList(t *layer.TagList, sid id.SceneID) []LayerTag { + if t.IsEmpty() { 
+ return nil + } + + return util.FilterMap(t.Tags(), func(v layer.Tag) *LayerTag { + if t := ToLayerTag(v); t != nil { + return &t + } + return nil + }) +} + +func ToLayerTag(l layer.Tag) LayerTag { + if l == nil { + return nil + } + if tg := layer.TagGroupFrom(l); tg != nil { + return ToLayerTagGroup(tg) + } + if ti := layer.TagItemFrom(l); ti != nil { + return ToLayerTagItem(ti) + } + return nil +} + +func ToLayerTagItem(t *layer.TagItem) *LayerTagItem { + if t == nil { + return nil + } + return &LayerTagItem{ + TagID: IDFrom(t.ID()), + } +} + +func ToLayerTagGroup(t *layer.TagGroup) *LayerTagGroup { + if t == nil { + return nil + } + + return &LayerTagGroup{ + TagID: IDFrom(t.ID()), + Children: util.FilterMapR(t.Children(), ToLayerTagItem), + } +} diff --git a/server/internal/adapter/gql/gqlmodel/convert_plugin.go b/server/internal/adapter/gql/gqlmodel/convert_plugin.go new file mode 100644 index 000000000..485d2fb4e --- /dev/null +++ b/server/internal/adapter/gql/gqlmodel/convert_plugin.go @@ -0,0 +1,120 @@ +package gqlmodel + +import ( + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/util" +) + +func ToPlugin(p *plugin.Plugin) *Plugin { + if p == nil { + return nil + } + + pid := IDFromPluginID(p.ID()) + return &Plugin{ + ID: pid, + SceneID: IDFromRef(p.ID().Scene()), + Name: p.Name().String(), + Description: p.Description().String(), + AllTranslatedDescription: p.Description(), + AllTranslatedName: p.Name(), + Author: p.Author(), + RepositoryURL: p.RepositoryURL(), + Version: p.Version().String(), + PropertySchemaID: IDFromPropertySchemaIDRef(p.Schema()), + Extensions: util.Map(p.Extensions(), func(pe *plugin.Extension) *PluginExtension { + return &PluginExtension{ + ExtensionID: ID(pe.ID()), + PluginID: pid, + Type: ToPluginExtensionType(pe.Type()), + Visualizer: ToVisualizerRef(pe.Visualizer()), + Name: pe.Name().String(), + Description: pe.Description().String(), + Icon: pe.Icon(), + SingleOnly: 
BoolToRef(pe.SingleOnly()), + WidgetLayout: ToPluginWidgetLayout(pe.WidgetLayout()), + PropertySchemaID: IDFromPropertySchemaID(pe.Schema()), + AllTranslatedDescription: pe.Description(), + AllTranslatedName: pe.Name(), + } + }), + } +} + +func ToPluginExtensionType(t plugin.ExtensionType) PluginExtensionType { + switch t { + case plugin.ExtensionTypePrimitive: + return PluginExtensionTypePrimitive + case plugin.ExtensionTypeWidget: + return PluginExtensionTypeWidget + case plugin.ExtensionTypeBlock: + return PluginExtensionTypeBlock + case plugin.ExtensionTypeVisualizer: + return PluginExtensionTypeVisualizer + case plugin.ExtensionTypeInfobox: + return PluginExtensionTypeInfobox + } + return PluginExtensionType("") +} + +func ToPluginWidgetLayout(wl *plugin.WidgetLayout) *WidgetLayout { + if wl == nil { + return nil + } + + return &WidgetLayout{ + Extendable: &WidgetExtendable{ + Horizontally: wl.HorizontallyExtendable(), + Vertically: wl.VerticallyExtendable(), + }, + Extended: wl.Extended(), + Floating: wl.Floating(), + DefaultLocation: ToPluginWidgetLocation(wl.DefaultLocation()), + } +} + +func ToPluginWidgetLocation(l *plugin.WidgetLocation) *WidgetLocation { + if l == nil { + return nil + } + + return &WidgetLocation{ + Zone: ToPluginWidgetZoneType(l.Zone), + Section: ToPluginWidgetSectionType(l.Section), + Area: ToPluginWidgetAreaType(l.Area), + } +} + +func ToPluginWidgetZoneType(t plugin.WidgetZoneType) WidgetZoneType { + switch t { + case plugin.WidgetZoneInner: + return WidgetZoneTypeInner + case plugin.WidgetZoneOuter: + return WidgetZoneTypeOuter + } + return "" +} + +func ToPluginWidgetSectionType(t plugin.WidgetSectionType) WidgetSectionType { + switch t { + case plugin.WidgetSectionLeft: + return WidgetSectionTypeLeft + case plugin.WidgetSectionCenter: + return WidgetSectionTypeCenter + case plugin.WidgetSectionRight: + return WidgetSectionTypeRight + } + return "" +} + +func ToPluginWidgetAreaType(t plugin.WidgetAreaType) WidgetAreaType { + 
switch t { + case plugin.WidgetAreaTop: + return WidgetAreaTypeTop + case plugin.WidgetAreaMiddle: + return WidgetAreaTypeMiddle + case plugin.WidgetAreaBottom: + return WidgetAreaTypeBottom + } + return "" +} diff --git a/server/internal/adapter/gql/gqlmodel/convert_project.go b/server/internal/adapter/gql/gqlmodel/convert_project.go new file mode 100644 index 000000000..ce8a1249a --- /dev/null +++ b/server/internal/adapter/gql/gqlmodel/convert_project.go @@ -0,0 +1,64 @@ +package gqlmodel + +import ( + "time" + + "github.com/reearth/reearth-backend/pkg/project" +) + +func FromPublishmentStatus(v PublishmentStatus) project.PublishmentStatus { + switch v { + case PublishmentStatusPublic: + return project.PublishmentStatusPublic + case PublishmentStatusLimited: + return project.PublishmentStatusLimited + case PublishmentStatusPrivate: + return project.PublishmentStatusPrivate + } + return project.PublishmentStatus("") +} + +func ToPublishmentStatus(v project.PublishmentStatus) PublishmentStatus { + switch v { + case project.PublishmentStatusPublic: + return PublishmentStatusPublic + case project.PublishmentStatusLimited: + return PublishmentStatusLimited + case project.PublishmentStatusPrivate: + return PublishmentStatusPrivate + } + return PublishmentStatus("") +} + +func ToProject(p *project.Project) *Project { + if p == nil { + return nil + } + + var publishedAtRes *time.Time + if publishedAt := p.PublishedAt(); !publishedAt.IsZero() { + publishedAtRes = &publishedAt + } + + return &Project{ + ID: IDFrom(p.ID()), + CreatedAt: p.CreatedAt(), + IsArchived: p.IsArchived(), + IsBasicAuthActive: p.IsBasicAuthActive(), + BasicAuthUsername: p.BasicAuthUsername(), + BasicAuthPassword: p.BasicAuthPassword(), + Alias: p.Alias(), + Name: p.Name(), + Description: p.Description(), + ImageURL: p.ImageURL(), + PublishedAt: publishedAtRes, + UpdatedAt: p.UpdatedAt(), + Visualizer: Visualizer(p.Visualizer()), + TeamID: IDFrom(p.Team()), + PublishmentStatus: 
ToPublishmentStatus(p.PublishmentStatus()), + PublicTitle: p.PublicTitle(), + PublicDescription: p.PublicDescription(), + PublicImage: p.PublicImage(), + PublicNoIndex: p.PublicNoIndex(), + } +} diff --git a/server/internal/adapter/gql/gqlmodel/convert_property.go b/server/internal/adapter/gql/gqlmodel/convert_property.go new file mode 100644 index 000000000..16fbcce52 --- /dev/null +++ b/server/internal/adapter/gql/gqlmodel/convert_property.go @@ -0,0 +1,482 @@ +package gqlmodel + +import ( + "strings" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/util" + "github.com/reearth/reearth-backend/pkg/value" +) + +func ToPropertyValue(v *property.Value) *interface{} { + var res interface{} + if v == nil { + return nil + } + switch v2 := v.Value().(type) { + case property.Camera: + res = Camera{ + Lat: v2.Lat, + Lng: v2.Lng, + Altitude: v2.Altitude, + Heading: v2.Heading, + Pitch: v2.Pitch, + Roll: v2.Roll, + Fov: v2.FOV, + } + case property.Typography: + res = Typography{ + FontFamily: v2.FontFamily, + FontSize: v2.FontSize, + FontWeight: v2.FontWeight, + Color: v2.Color, + TextAlign: ToTextAlign(v2.TextAlign), + Bold: v2.Bold, + Italic: v2.Italic, + Underline: v2.Underline, + } + default: + res = valueInterfaceToGqlValue(v2) + } + return &res +} + +func ToTextAlign(t *property.TextAlign) *TextAlign { + if t == nil { + return nil + } + var t3 TextAlign + switch *t { + case property.TextAlignLeft: + t3 = TextAlignLeft + case property.TextAlignCenter: + t3 = TextAlignCenter + case property.TextAlignRight: + t3 = TextAlignRight + case property.TextAlignJustify: + t3 = TextAlignJustify + case property.TextAlignJustifyAll: + t3 = TextAlignJustifyAll + default: + return nil + } + return &t3 +} + +func FromPropertyValueAndType(v interface{}, t ValueType) *property.Value { + switch v2 := v.(type) { + case *Camera: + v = property.Camera{ + Lat: v2.Lat, + Lng: v2.Lng, + Altitude: 
v2.Altitude, + Heading: v2.Heading, + Pitch: v2.Pitch, + Roll: v2.Roll, + FOV: v2.Fov, + } + case *Typography: + v = property.Typography{ + FontFamily: v2.FontFamily, + FontSize: v2.FontSize, + FontWeight: v2.FontWeight, + Color: v2.Color, + TextAlign: fromTextAlign(v2.TextAlign), + Bold: v2.Bold, + Italic: v2.Italic, + Underline: v2.Underline, + } + default: + v = gqlValueToValueInterface(v2) + } + return property.ValueType(FromValueType(t)).ValueFrom(v) +} + +func fromTextAlign(t *TextAlign) *property.TextAlign { + if t == nil { + return nil + } + var t2 property.TextAlign + switch *t { + case TextAlignLeft: + t2 = property.TextAlignLeft + case TextAlignCenter: + t2 = property.TextAlignCenter + case TextAlignRight: + t2 = property.TextAlignRight + case TextAlignJustify: + t2 = property.TextAlignJustify + case TextAlignJustifyAll: + t2 = property.TextAlignJustifyAll + default: + return nil + } + return &t2 +} + +func ToPropertyField(f *property.Field, parent *property.Property, gl *property.GroupList, g *property.Group) *PropertyField { + if f == nil { + return nil + } + + return &PropertyField{ + ID: propertyFieldID(parent, gl, g, f), + ParentID: IDFrom(parent.ID()), + SchemaID: IDFromPropertySchemaID(parent.Schema()), + FieldID: ID(f.Field()), + Value: ToPropertyValue(f.Value()), + Type: ToValueType(value.Type(f.Type())), + Links: util.Map(f.Links().Links(), ToPropertyFieldLink), + } +} + +func ToPropertyFieldLinks(flinks *property.Links) []*PropertyFieldLink { + if flinks == nil { + return nil + } + var links []*PropertyFieldLink + links = make([]*PropertyFieldLink, 0, flinks.Len()) + for _, l := range flinks.Links() { + links = append(links, ToPropertyFieldLink(l)) + } + return links +} + +func FromPropertyFieldLink(datasetSchema, ds, fields []ID) (*property.Links, error) { + if len(datasetSchema) != len(fields) || (ds != nil && len(ds) != len(fields) && len(ds) > 1) { + return nil, nil + } + + links := make([]*property.Link, 0, len(datasetSchema)) + for i, 
dss := range datasetSchema { + f := fields[i] + dsid, dsfid, err := ToID2[id.DatasetSchema, id.DatasetField](dss, f) + if err != nil { + return nil, err + } + if len(ds) == 0 || (len(ds) == 1 && i > 0) { + links = append(links, property.NewLinkFieldOnly(dsid, dsfid)) + } else { + did, err := ToID[id.Dataset](ds[i]) + if err != nil { + return nil, err + } + links = append(links, property.NewLink(did, dsid, dsfid)) + } + } + + return property.NewLinks(links), nil +} + +func ToPropertyFieldLink(link *property.Link) *PropertyFieldLink { + ds := link.DatasetSchema() + df := link.DatasetSchemaField() + if ds == nil || df == nil { + return nil + } + + return &PropertyFieldLink{ + DatasetID: IDFromRef(link.Dataset()), + DatasetSchemaID: IDFrom(*ds), + DatasetSchemaFieldID: IDFrom(*df), + } +} + +func ToProperty(property *property.Property) *Property { + if property == nil { + return nil + } + + pitems := property.Items() + items := make([]PropertyItem, 0, len(pitems)) + for _, i := range pitems { + items = append(items, ToPropertyItem(i, property, nil)) + } + + return &Property{ + ID: IDFrom(property.ID()), + SchemaID: IDFromPropertySchemaID(property.Schema()), + Items: items, + } +} + +func ToPropertySchema(propertySchema *property.Schema) *PropertySchema { + if propertySchema == nil { + return nil + } + + psid := propertySchema.ID() + return &PropertySchema{ + ID: IDFromPropertySchemaID(psid), + Groups: util.Map(propertySchema.Groups().Groups(), func(g *property.SchemaGroup) *PropertySchemaGroup { + return ToPropertySchemaGroup(g, psid) + }), + LinkableFields: ToPropertyLinkableFields(propertySchema.ID(), propertySchema.LinkableFields()), + } +} + +func ToPropertyLinkableFields(sid id.PropertySchemaID, l property.LinkableFields) *PropertyLinkableFields { + var latlng, url *id.PropertyFieldID + if l.LatLng != nil { + latlng = &l.LatLng.Field + } + if l.URL != nil { + url = &l.URL.Field + } + return &PropertyLinkableFields{ + SchemaID: IDFromPropertySchemaID(sid), + 
// ToPropertySchemaField converts a domain property schema field into its
// GraphQL model, including the translated title/description maps.
// Returns nil when f is nil.
func ToPropertySchemaField(f *property.SchemaField) *PropertySchemaField {
	if f == nil {
		return nil
	}

	return &PropertySchemaField{
		FieldID:      ID(f.ID()),
		Type:         ToValueType(value.Type(f.Type())),
		Title:        f.Title().String(),
		Description:  f.Description().String(),
		Prefix:       stringToRef(f.Prefix()),
		Suffix:       stringToRef(f.Suffix()),
		DefaultValue: ToPropertyValue(f.DefaultValue()),
		UI:           ToPropertySchemaFieldUI(f.UI()),
		Min:          f.Min(),
		Max:          f.Max(),
		Choices: util.Map(f.Choices(), func(c property.SchemaFieldChoice) *PropertySchemaFieldChoice {
			return &PropertySchemaFieldChoice{
				Key:                c.Key,
				Title:              c.Title.String(),
				AllTranslatedTitle: c.Title,
				Icon:               stringToRef(c.Icon),
			}
		}),
		IsAvailableIf:            ToPropertyConditon(f.IsAvailableIf()),
		AllTranslatedTitle:       f.Title(),
		AllTranslatedDescription: f.Description(),
	}
}

// ToPropertySchemaFieldUI maps a domain schema-field UI hint to the GraphQL
// enum. Returns nil for a nil or unrecognized hint so the GraphQL field stays
// unset rather than carrying an invalid empty enum value.
func ToPropertySchemaFieldUI(ui *property.SchemaFieldUI) *PropertySchemaFieldUI {
	if ui == nil {
		return nil
	}

	ui2 := PropertySchemaFieldUI("")
	switch *ui {
	case property.SchemaFieldUIMultiline:
		ui2 = PropertySchemaFieldUIMultiline
	case property.SchemaFieldUISelection:
		ui2 = PropertySchemaFieldUISelection
	case property.SchemaFieldUIColor:
		ui2 = PropertySchemaFieldUIColor
	case property.SchemaFieldUIRange:
		ui2 = PropertySchemaFieldUIRange
	case property.SchemaFieldUISlider:
		ui2 = PropertySchemaFieldUISlider
	case property.SchemaFieldUIImage:
		ui2 = PropertySchemaFieldUIImage
	case property.SchemaFieldUIVideo:
		ui2 = PropertySchemaFieldUIVideo
	case property.SchemaFieldUIFile:
		ui2 = PropertySchemaFieldUIFile
	case property.SchemaFieldUILayer:
		ui2 = PropertySchemaFieldUILayer
	case property.SchemaFieldUICameraPose:
		ui2 = PropertySchemaFieldUICameraPose
	}
	// Only return a value when the switch actually matched.
	if ui2 != PropertySchemaFieldUI("") {
		return &ui2
	}
	return nil
}

// ToMergedPropertyFromMetadata builds a MergedProperty from merge metadata
// only; Groups is deliberately left nil and filled in later by the GraphQL
// resolver. Returns nil when m is nil.
func ToMergedPropertyFromMetadata(m *property.MergedMetadata) *MergedProperty {
	if m == nil {
		return nil
	}

	return &MergedProperty{
		OriginalID:      IDFromRef(m.Original),
		ParentID:        IDFromRef(m.Parent),
		LinkedDatasetID: IDFromRef(m.LinkedDataset),
		Groups:          nil, // resolved by graphql resolver
	}
}

// ToMergedProperty converts a fully merged property, including all of its
// merged groups. Returns nil when m is nil.
func ToMergedProperty(m *property.Merged) *MergedProperty {
	if m == nil {
		return nil
	}

	return &MergedProperty{
		OriginalID:      IDFromRef(m.Original),
		ParentID:        IDFromRef(m.Parent),
		SchemaID:        IDFromPropertySchemaIDRef(m.Schema.Ref()),
		LinkedDatasetID: IDFromRef(m.LinkedDataset),
		Groups: util.Map(m.Groups, func(g *property.MergedGroup) *MergedPropertyGroup {
			return ToMergedPropertyGroup(g, m)
		}),
	}
}

// ToMergedPropertyGroup converts one merged group of p, recursing into nested
// groups. p supplies the property-level original/parent IDs and the schema
// used for the group's fields. Returns nil when f is nil.
func ToMergedPropertyGroup(f *property.MergedGroup, p *property.Merged) *MergedPropertyGroup {
	if f == nil {
		return nil
	}

	return &MergedPropertyGroup{
		OriginalPropertyID: IDFromRef(p.Original),
		ParentPropertyID:   IDFromRef(p.Parent),
		OriginalID:         IDFromRef(f.Original),
		SchemaGroupID:      ID(f.SchemaGroup),
		ParentID:           IDFromRef(f.Parent),
		SchemaID:           IDFromPropertySchemaIDRef(p.Schema.Ref()),
		LinkedDatasetID:    IDFromRef(f.LinkedDataset),
		Fields: util.Map(f.Fields, func(f *property.MergedField) *MergedPropertyField {
			return ToMergedPropertyField(f, p.Schema)
		}),
		Groups: util.Map(f.Groups, func(g *property.MergedGroup) *MergedPropertyGroup {
			return ToMergedPropertyGroup(g, p)
		}),
	}
}

// ToMergedPropertyField converts one merged field; s is the property schema
// the field belongs to. Returns nil when f is nil.
func ToMergedPropertyField(f *property.MergedField, s id.PropertySchemaID) *MergedPropertyField {
	if f == nil {
		return nil
	}

	return &MergedPropertyField{
		FieldID:    ID(f.ID),
		SchemaID:   IDFromPropertySchemaID(s),
		Links:      ToPropertyFieldLinks(f.Links),
		Value:      ToPropertyValue(f.Value),
		Type:       ToValueType(value.Type(f.Type)),
		Overridden: f.Overridden,
	}
}
// ToPropertySchemaGroup converts a domain schema group into its GraphQL
// model. While converting the fields it also picks out the representative
// field (if the group declares one) so the GraphQL model can expose it
// directly. Returns nil when g is nil.
func ToPropertySchemaGroup(g *property.SchemaGroup, s property.SchemaID) *PropertySchemaGroup {
	if g == nil {
		return nil
	}

	gfields := g.Fields()
	fields := make([]*PropertySchemaField, 0, len(gfields))

	var representativeField *PropertySchemaField
	representativeFieldID := g.RepresentativeFieldID()

	for _, f := range gfields {
		f2 := ToPropertySchemaField(f)
		fields = append(fields, f2)
		// Reuse the converted field as the representative one so both
		// references point at the same object.
		if representativeFieldID != nil && f.ID() == *representativeFieldID {
			representativeField = f2
		}
	}

	return &PropertySchemaGroup{
		SchemaGroupID:         ID(g.ID()),
		SchemaID:              IDFromPropertySchemaID(s),
		IsList:                g.IsList(),
		Title:                 g.Title().StringRef(),
		Fields:                fields,
		RepresentativeFieldID: IDFromStringRef(representativeFieldID),
		RepresentativeField:   representativeField,
		AllTranslatedTitle:    g.Title(),
		IsAvailableIf:         ToPropertyConditon(g.IsAvailableIf()),
	}
}

// ToPropertyGroup converts a property group item; p is the owning property
// and gl the containing group list (nil for a top-level group). Returns nil
// when g is nil.
func ToPropertyGroup(g *property.Group, p *property.Property, gl *property.GroupList) *PropertyGroup {
	if g == nil {
		return nil
	}

	return &PropertyGroup{
		ID:            IDFrom(g.ID()),
		SchemaID:      IDFromPropertySchemaID(p.Schema()),
		SchemaGroupID: ID(g.SchemaGroup()),
		Fields: util.Map(g.Fields(nil), func(f *property.Field) *PropertyField {
			return ToPropertyField(f, p, gl, g)
		}),
	}
}

// ToPropertyGroupList converts a group-list item, converting each contained
// group with ToPropertyGroup. Returns nil when gl is nil.
func ToPropertyGroupList(gl *property.GroupList, p *property.Property) *PropertyGroupList {
	if gl == nil {
		return nil
	}

	return &PropertyGroupList{
		ID:            IDFrom(gl.ID()),
		SchemaID:      IDFromPropertySchemaID(p.Schema()),
		SchemaGroupID: ID(gl.SchemaGroup()),
		Groups: util.Map(gl.Groups(), func(g *property.Group) *PropertyGroup {
			return ToPropertyGroup(g, p, gl)
		}),
	}
}

// ToPropertyItem dispatches a generic property item to the group or
// group-list converter. Returns nil for a nil or unrecognized item.
func ToPropertyItem(i property.Item, p *property.Property, pgl *property.GroupList) PropertyItem {
	if i == nil {
		return nil
	}

	if g := property.ToGroup(i); g != nil {
		return ToPropertyGroup(g, p, pgl)
	} else if gl := property.ToGroupList(i); gl != nil {
		return ToPropertyGroupList(gl, p)
	}
	return nil
}

// ToPropertyConditon converts an availability condition. Returns nil when c
// is nil.
// NOTE(review): c.Value is dereferenced via Type(); this assumes a condition
// always carries a non-nil value (or that Type() is nil-receiver-safe) —
// confirm against pkg/property.
func ToPropertyConditon(c *property.Condition) *PropertyCondition {
	if c == nil {
		return nil
	}

	return &PropertyCondition{
		FieldID: ID(c.Field),
		Value:   ToPropertyValue(c.Value),
		Type:    ToValueType(value.Type(c.Value.Type())),
	}
}

// FromPointer builds a domain property pointer from the GraphQL-side schema
// group / item / field references (any of which may be nil).
func FromPointer(schemaItem *id.PropertySchemaGroupID, item *ID, field *id.PropertyFieldID) *property.Pointer {
	return property.NewPointer(schemaItem, ToIDRef[id.PropertyItem](item), field)
}

// ToPropertyLatLng combines optional lat/lng inputs into a LatLng value.
// Returns nil unless both components are present.
func ToPropertyLatLng(lat, lng *float64) *property.LatLng {
	var latlng *property.LatLng
	if lat != nil && lng != nil {
		latlng2 := property.LatLng{Lat: *lat, Lng: *lng}
		latlng = &latlng2
	}
	return latlng
}

// propertyFieldID builds a composite, underscore-joined identifier for a
// property field: property ID, optional group-list ID, group ID, then field
// ID. Returns "" when any required part is missing.
func propertyFieldID(property *property.Property, groupList *property.GroupList, group *property.Group, field *property.Field) string {
	if property == nil || group == nil || field == nil {
		return ""
	}

	const sep = "_"
	var sb strings.Builder
	sb.WriteString(property.ID().String())
	sb.WriteString(sep)
	if groupList != nil {
		sb.WriteString(groupList.ID().String())
		sb.WriteString(sep)
	}
	sb.WriteString(group.ID().String())
	sb.WriteString(sep)
	sb.WriteString(field.Field().String())

	return sb.String()
}

// TestFromPropertyValueAndType checks that both native float64 values and
// json.Number values decode to the same domain number value.
func TestFromPropertyValueAndType(t *testing.T) {
	type args struct {
		v interface{}
		t ValueType
	}

	tests := []struct {
		name string
		args args
		want *property.Value
	}{
		{
			name: "number",
			args: args{
				v: 1.1,
				t: ValueTypeNumber,
			},
			want: property.ValueTypeNumber.ValueFrom(1.1),
		},
		{
			name: "json number",
			args: args{
				v: json.Number("1.1"),
				t: ValueTypeNumber,
			},
			want: property.ValueTypeNumber.ValueFrom(1.1),
		},
	}

	for _, tt := range tests {
		tt := tt // capture range variable for the parallel subtest
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			assert.Equal(t, tt.want, FromPropertyValueAndType(tt.args.v, tt.args.t))
		})
	}
}
t.Parallel() + assert.Equal(t, tt.want, FromPropertyValueAndType(tt.args.v, tt.args.t)) + }) + } +} diff --git a/server/internal/adapter/gql/gqlmodel/convert_scene.go b/server/internal/adapter/gql/gqlmodel/convert_scene.go new file mode 100644 index 000000000..a172d1ca2 --- /dev/null +++ b/server/internal/adapter/gql/gqlmodel/convert_scene.go @@ -0,0 +1,60 @@ +package gqlmodel + +import ( + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/util" +) + +func ToSceneWidget(w *scene.Widget) *SceneWidget { + if w == nil { + return nil + } + + return &SceneWidget{ + ID: IDFrom(w.ID()), + PluginID: IDFromPluginID(w.Plugin()), + ExtensionID: ID(w.Extension()), + PropertyID: IDFrom(w.Property()), + Enabled: w.Enabled(), + Extended: w.Extended(), + } +} + +func ToScenePlugin(sp *scene.Plugin) *ScenePlugin { + if sp == nil { + return nil + } + + return &ScenePlugin{ + PluginID: IDFromPluginID(sp.Plugin()), + PropertyID: IDFromRef(sp.Property()), + } +} + +func ToCluster(c *scene.Cluster) *Cluster { + return &Cluster{ + ID: IDFrom(c.ID()), + Name: c.Name(), + PropertyID: IDFrom(c.Property()), + } +} + +func ToScene(scene *scene.Scene) *Scene { + if scene == nil { + return nil + } + + return &Scene{ + ID: IDFrom(scene.ID()), + ProjectID: IDFrom(scene.Project()), + PropertyID: IDFrom(scene.Property()), + TeamID: IDFrom(scene.Team()), + RootLayerID: IDFrom(scene.RootLayer()), + CreatedAt: scene.CreatedAt(), + UpdatedAt: scene.UpdatedAt(), + Plugins: util.Map(scene.Plugins().Plugins(), ToScenePlugin), + Clusters: util.Map(scene.Clusters().Clusters(), ToCluster), + Widgets: util.Map(scene.Widgets().Widgets(), ToSceneWidget), + WidgetAlignSystem: ToWidgetAlignSystem(scene.Widgets().Alignment()), + } +} diff --git a/server/internal/adapter/gql/gqlmodel/convert_scene_align.go b/server/internal/adapter/gql/gqlmodel/convert_scene_align.go new file mode 100644 index 000000000..644f2ed73 --- /dev/null +++ 
b/server/internal/adapter/gql/gqlmodel/convert_scene_align.go @@ -0,0 +1,124 @@ +package gqlmodel + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/util" +) + +func ToWidgetAlignSystem(sas *scene.WidgetAlignSystem) *WidgetAlignSystem { + widgetAlignDoc := WidgetAlignSystem{ + Inner: ToWidgetZone(sas.Zone(scene.WidgetZoneInner)), + Outer: ToWidgetZone(sas.Zone(scene.WidgetZoneOuter)), + } + return &widgetAlignDoc +} + +func ToWidgetZone(z *scene.WidgetZone) *WidgetZone { + if z == nil { + return nil + } + + return &WidgetZone{ + Left: ToWidgetSection(z.Section(scene.WidgetSectionLeft)), + Center: ToWidgetSection(z.Section(scene.WidgetSectionCenter)), + Right: ToWidgetSection(z.Section(scene.WidgetSectionRight)), + } +} + +func ToWidgetSection(s *scene.WidgetSection) *WidgetSection { + if s == nil { + return nil + } + + return &WidgetSection{ + Top: ToWidgetArea(s.Area(scene.WidgetAreaTop)), + Middle: ToWidgetArea(s.Area(scene.WidgetAreaMiddle)), + Bottom: ToWidgetArea(s.Area(scene.WidgetAreaBottom)), + } +} + +func ToWidgetArea(a *scene.WidgetArea) *WidgetArea { + if a == nil { + return nil + } + + return &WidgetArea{ + WidgetIds: util.Map(a.WidgetIDs(), IDFrom[id.Widget]), + Align: ToWidgetAlignType(a.Alignment()), + } +} + +func ToWidgetAlignType(s scene.WidgetAlignType) WidgetAreaAlign { + switch s { + case scene.WidgetAlignStart: + return WidgetAreaAlignStart + case scene.WidgetAlignCentered: + return WidgetAreaAlignCentered + case scene.WidgetAlignEnd: + return WidgetAreaAlignEnd + } + return "" +} + +func FromSceneWidgetLocation(l *WidgetLocationInput) *scene.WidgetLocation { + if l == nil { + return nil + } + + return &scene.WidgetLocation{ + Zone: FromSceneWidgetZoneType(l.Zone), + Section: FromSceneWidgetSectionType(l.Section), + Area: FromSceneWidgetAreaType(l.Area), + } +} + +func FromSceneWidgetZoneType(t WidgetZoneType) scene.WidgetZoneType { + switch t { 
+ case WidgetZoneTypeInner: + return scene.WidgetZoneInner + case WidgetZoneTypeOuter: + return scene.WidgetZoneOuter + } + return "" +} + +func FromSceneWidgetSectionType(t WidgetSectionType) scene.WidgetSectionType { + switch t { + case WidgetSectionTypeLeft: + return scene.WidgetSectionLeft + case WidgetSectionTypeCenter: + return scene.WidgetSectionCenter + case WidgetSectionTypeRight: + return scene.WidgetSectionRight + } + return "" +} + +func FromSceneWidgetAreaType(t WidgetAreaType) scene.WidgetAreaType { + switch t { + case WidgetAreaTypeTop: + return scene.WidgetAreaTop + case WidgetAreaTypeMiddle: + return scene.WidgetAreaMiddle + case WidgetAreaTypeBottom: + return scene.WidgetAreaBottom + } + return "" +} + +func FromWidgetAlignType(a *WidgetAreaAlign) *scene.WidgetAlignType { + if a == nil { + return nil + } + var r scene.WidgetAlignType + switch *a { + case WidgetAreaAlignStart: + r = scene.WidgetAlignStart + case WidgetAreaAlignCentered: + r = scene.WidgetAlignCentered + case WidgetAreaAlignEnd: + r = scene.WidgetAlignEnd + } + return &r +} diff --git a/server/internal/adapter/gql/gqlmodel/convert_tag.go b/server/internal/adapter/gql/gqlmodel/convert_tag.go new file mode 100644 index 000000000..71d95711a --- /dev/null +++ b/server/internal/adapter/gql/gqlmodel/convert_tag.go @@ -0,0 +1,50 @@ +package gqlmodel + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/tag" + "github.com/reearth/reearth-backend/pkg/util" +) + +func ToTagItem(ti *tag.Item) *TagItem { + if ti == nil { + return nil + } + + return &TagItem{ + ID: IDFrom(ti.ID()), + SceneID: IDFrom(ti.Scene()), + Label: ti.Label(), + ParentID: IDFromRef(ti.Parent()), + LinkedDatasetID: IDFromRef(ti.LinkedDatasetID()), + LinkedDatasetSchemaID: IDFromRef(ti.LinkedDatasetSchemaID()), + LinkedDatasetFieldID: IDFromRef(ti.LinkedDatasetFieldID()), + } +} + +func ToTagGroup(tg *tag.Group) *TagGroup { + if tg == nil { + return nil + } + + return &TagGroup{ 
+ ID: IDFrom(tg.ID()), + SceneID: IDFrom(tg.Scene()), + Label: tg.Label(), + TagIds: util.Map(tg.Tags(), IDFrom[id.Tag]), + } +} + +func ToTag(t tag.Tag) Tag { + if t == nil { + return nil + } + + switch ty := t.(type) { + case *tag.Item: + return ToTagItem(ty) + case *tag.Group: + return ToTagGroup(ty) + } + return nil +} diff --git a/server/internal/adapter/gql/gqlmodel/convert_user.go b/server/internal/adapter/gql/gqlmodel/convert_user.go new file mode 100644 index 000000000..4a4ceb68b --- /dev/null +++ b/server/internal/adapter/gql/gqlmodel/convert_user.go @@ -0,0 +1,97 @@ +package gqlmodel + +import ( + "github.com/reearth/reearth-backend/pkg/user" + "github.com/reearth/reearth-backend/pkg/util" +) + +func ToUser(u *user.User) *User { + if u == nil { + return nil + } + + return &User{ + ID: IDFrom(u.ID()), + Name: u.Name(), + Email: u.Email(), + } +} + +func ToMe(u *user.User) *Me { + if u == nil { + return nil + } + + return &Me{ + ID: IDFrom(u.ID()), + Name: u.Name(), + Email: u.Email(), + Lang: u.Lang(), + Theme: Theme(u.Theme()), + MyTeamID: IDFrom(u.Team()), + Auths: util.Map(u.Auths(), func(a user.Auth) string { + return a.Provider + }), + } +} + +func ToTheme(t *Theme) *user.Theme { + if t == nil { + return nil + } + + th := user.ThemeDefault + switch *t { + case ThemeDark: + th = user.ThemeDark + case ThemeLight: + th = user.ThemeLight + } + return &th +} + +func ToTeam(t *user.Team) *Team { + if t == nil { + return nil + } + + memberMap := t.Members().Members() + members := make([]*TeamMember, 0, len(memberMap)) + for u, r := range memberMap { + members = append(members, &TeamMember{ + UserID: IDFrom(u), + Role: ToRole(r), + }) + } + + return &Team{ + ID: IDFrom(t.ID()), + Name: t.Name(), + Personal: t.IsPersonal(), + Members: members, + } +} + +func FromRole(r Role) user.Role { + switch r { + case RoleReader: + return user.RoleReader + case RoleWriter: + return user.RoleWriter + case RoleOwner: + return user.RoleOwner + } + return user.Role("") +} + 
+func ToRole(r user.Role) Role { + switch r { + case user.RoleReader: + return RoleReader + case user.RoleWriter: + return RoleWriter + case user.RoleOwner: + return RoleOwner + } + return Role("") +} diff --git a/server/internal/adapter/gql/gqlmodel/convert_value.go b/server/internal/adapter/gql/gqlmodel/convert_value.go new file mode 100644 index 000000000..bf2ed7c54 --- /dev/null +++ b/server/internal/adapter/gql/gqlmodel/convert_value.go @@ -0,0 +1,191 @@ +package gqlmodel + +import ( + "net/url" + "strings" + + "github.com/reearth/reearth-backend/pkg/value" +) + +func valueInterfaceToGqlValue(v interface{}) interface{} { + if v == nil { + return nil + } + switch v2 := v.(type) { + case bool: + return v2 + case float64: + return v2 + case string: + return v2 + case *url.URL: + return v2.String() + case value.LatLng: + return LatLng{ + Lat: v2.Lat, + Lng: v2.Lng, + } + case value.LatLngHeight: + return LatLngHeight{ + Lat: v2.Lat, + Lng: v2.Lng, + Height: v2.Height, + } + case *value.LatLng: + return LatLng{ + Lat: v2.Lat, + Lng: v2.Lng, + } + case *value.LatLngHeight: + return LatLngHeight{ + Lat: v2.Lat, + Lng: v2.Lng, + Height: v2.Height, + } + case []value.LatLngHeight: + res := make([]LatLngHeight, 0, len(v2)) + for _, c := range v2 { + res = append(res, LatLngHeight{ + Lat: c.Lat, + Lng: c.Lng, + Height: c.Height, + }) + } + return res + case [][]value.LatLngHeight: + res := make([][]LatLngHeight, 0, len(v2)) + for _, d := range v2 { + coord := make([]LatLngHeight, 0, len(d)) + for _, c := range d { + coord = append(coord, LatLngHeight{ + Lat: c.Lat, + Lng: c.Lng, + Height: c.Height, + }) + } + res = append(res, coord) + } + return res + case *value.Rect: + return Rect{ + West: v2.West, + East: v2.East, + North: v2.North, + South: v2.South, + } + } + return nil +} + +func gqlValueToValueInterface(v interface{}) interface{} { + if v == nil { + return nil + } + switch v2 := v.(type) { + case LatLng: + return value.LatLng{ + Lat: v2.Lat, + Lng: v2.Lng, + } + 
case *LatLng: + if v2 == nil { + return nil + } + return value.LatLng{ + Lat: v2.Lat, + Lng: v2.Lng, + } + case LatLngHeight: + return value.LatLngHeight{ + Lat: v2.Lat, + Lng: v2.Lng, + Height: v2.Height, + } + case *LatLngHeight: + if v2 == nil { + return nil + } + return value.LatLngHeight{ + Lat: v2.Lat, + Lng: v2.Lng, + Height: v2.Height, + } + case []LatLngHeight: + res := make([]value.LatLngHeight, 0, len(v2)) + for _, c := range v2 { + res = append(res, value.LatLngHeight{ + Lat: c.Lat, + Lng: c.Lng, + Height: c.Height, + }) + } + return value.Coordinates(res) + case []*LatLngHeight: + res := make([]value.LatLngHeight, 0, len(v2)) + for _, c := range v2 { + if c == nil { + continue + } + res = append(res, value.LatLngHeight{ + Lat: c.Lat, + Lng: c.Lng, + Height: c.Height, + }) + } + return value.Coordinates(res) + case [][]LatLngHeight: + res := make([]value.Coordinates, 0, len(v2)) + for _, d := range v2 { + coord := make([]value.LatLngHeight, 0, len(d)) + for _, c := range d { + coord = append(coord, value.LatLngHeight{ + Lat: c.Lat, + Lng: c.Lng, + Height: c.Height, + }) + } + res = append(res, coord) + } + return value.Polygon(res) + case [][]*LatLngHeight: + res := make([]value.Coordinates, 0, len(v2)) + for _, d := range v2 { + coord := make([]value.LatLngHeight, 0, len(d)) + for _, c := range d { + if c == nil { + continue + } + coord = append(coord, value.LatLngHeight{ + Lat: c.Lat, + Lng: c.Lng, + Height: c.Height, + }) + } + res = append(res, coord) + } + return value.Polygon(res) + case Rect: + return value.Rect{ + West: v2.West, + East: v2.East, + North: v2.North, + South: v2.South, + } + case *Rect: + return value.Rect{ + West: v2.West, + East: v2.East, + North: v2.North, + South: v2.South, + } + } + return v +} + +func ToValueType(t value.Type) ValueType { + return ValueType(strings.ToUpper(string(t))) +} + +func FromValueType(t ValueType) value.Type { + return value.Type(strings.ToLower(string(t))) +} diff --git 
// Test_FromValueType pins the enum-to-domain mapping for two representative
// value types.
func Test_FromValueType(t *testing.T) {
	assert.Equal(t, value.TypeString, FromValueType(ValueTypeString))
	assert.Equal(t, value.TypeNumber, FromValueType(ValueTypeNumber))
}

// LayerItem and LayerGroup satisfy the GraphQL Node interface.
func (LayerItem) IsNode() {}

func (LayerGroup) IsNode() {}

// Copy returns a shallow copy of the link, or nil for a nil receiver.
func (l *PropertyFieldLink) Copy() *PropertyFieldLink {
	if l == nil {
		return nil
	}
	return &PropertyFieldLink{
		DatasetID:            l.DatasetID,
		DatasetSchemaID:      l.DatasetSchemaID,
		DatasetSchemaFieldID: l.DatasetSchemaFieldID,
	}
}

// Field returns the dataset field with the given field ID, or nil if absent.
func (d *Dataset) Field(id ID) *DatasetField {
	if d == nil || id == "" {
		return nil
	}
	for _, f := range d.Fields {
		if f.FieldID == id {
			return f
		}
	}
	return nil
}

// Field returns the schema field with the given ID, or nil if absent.
func (d *DatasetSchema) Field(id ID) *DatasetSchemaField {
	if d == nil || id == "" {
		return nil
	}
	for _, f := range d.Fields {
		if f.ID == id {
			return f
		}
	}
	return nil
}

// Field searches every group item of the property for a field with the given
// schema field ID. Group lists are not descended into here.
func (d *Property) Field(id id.PropertyFieldID) *PropertyField {
	if d == nil || id == "" {
		return nil
	}
	for _, g := range d.Items {
		if gi, ok := g.(*PropertyGroup); ok {
			for _, f := range gi.Fields {
				if f.ID == string(id) {
					return f
				}
			}
		}
	}
	return nil
}

// Field searches all groups of the schema for a field with the given ID.
func (d *PropertySchema) Field(id ID) *PropertySchemaField {
	if d == nil || id == "" {
		return nil
	}
	for _, g := range d.Groups {
		for _, f := range g.Fields {
			if f.FieldID == id {
				return f
			}
		}
	}
	return nil
}

// Extension returns the plugin extension with the given ID, or nil if absent.
func (d *Plugin) Extension(id ID) *PluginExtension {
	if d == nil || id == "" {
		return nil
	}
	for _, f := range d.Extensions {
		if f.ExtensionID == id {
			return f
		}
	}
	return nil
}

// Field returns the infobox field with the given ID, or nil if absent.
func (d *Infobox) Field(id ID) *InfoboxField {
	if d == nil || id == "" {
		return nil
	}
	for _, f := range d.Fields {
		if f.ID == id {
			return f
		}
	}
	return nil
}

// Field returns the merged infobox field whose original ID matches, or nil.
func (d *MergedInfobox) Field(id ID) *MergedInfoboxField {
	if d == nil || id == "" {
		return nil
	}
	for _, f := range d.Fields {
		if f.OriginalID == id {
			return f
		}
	}
	return nil
}

// AttachParentLayer sets parent as the ParentID of every layer in layers and
// flattens the result from []*Layer to []Layer. Nil entries are preserved.
func AttachParentLayer(layers []*Layer, parent ID) []Layer {
	if layers == nil {
		return nil
	}
	res := make([]Layer, 0, len(layers))
	for _, l := range layers {
		if l == nil {
			res = append(res, nil)
			continue
		}
		l2 := *l
		// l2 is the Layer interface value; it can still be nil even when the
		// pointer to it was not.
		if l2 == nil {
			res = append(res, nil)
			continue
		}
		if li, ok := l2.(*LayerItem); ok {
			li.ParentID = &parent
			res = append(res, li)
		} else if lg, ok := l2.(*LayerGroup); ok {
			lg.ParentID = &parent
			res = append(res, lg)
		}
	}
	return res
}

// NewEmptyPageInfo returns page info for an empty, cursorless result set.
func NewEmptyPageInfo() *PageInfo {
	return ToPageInfo(usecase.NewPageInfo(0, nil, nil, false, false))
}

// Field returns the group's field with the given ID, or nil if absent.
func (d *PropertyGroup) Field(id ID) *PropertyField {
	if d == nil || id == "" {
		return nil
	}
	for _, f := range d.Fields {
		if f.ID == string(id) {
			return f
		}
	}
	return nil
}

// Group returns the schema group with the given ID, or nil if absent.
func (d *PropertySchema) Group(id ID) *PropertySchemaGroup {
	if d == nil || id == "" {
		return nil
	}
	for _, f := range d.Groups {
		if f.SchemaGroupID == id {
			return f
		}
	}
	return nil
}

// Item returns the property item with the given ID. For a group list it also
// searches the list's nested groups.
func (d *Property) Item(id ID) PropertyItem {
	if d == nil || id == "" {
		return nil
	}
	for _, f := range d.Items {
		switch g := f.(type) {
		case *PropertyGroup:
			if g.ID == id {
				return g
			}
		case *PropertyGroupList:
			if g.ID == id {
				return g
			}
			h := g.Group(id)
			if h != nil {
				return h
			}
		}
	}
	return nil
}

// Group returns the list's group with the given ID, or nil if absent.
func (d *PropertyGroupList) Group(id ID) *PropertyGroup {
	if d == nil || id == "" {
		return nil
	}
	for _, f := range d.Groups {
		if f.ID == id {
			return f
		}
	}
	return nil
}

// PropertyID returns the original property ID, falling back to the parent ID.
func (d *MergedProperty) PropertyID() *ID {
	if d.OriginalID != nil {
		return d.OriginalID
	} else if d.ParentID != nil {
		return d.ParentID
	}
	return nil
}

// GroupByOriginal returns the merged group whose original ID matches, or nil.
func (d *MergedProperty) GroupByOriginal(id ID) *MergedPropertyGroup {
	if d == nil || id == "" {
		return nil
	}
	for _, f := range d.Groups {
		if f.OriginalID != nil && *f.OriginalID == id {
			return f
		}
	}
	return nil
}

// GroupByParent returns the merged group whose parent ID matches, or nil.
func (d *MergedProperty) GroupByParent(id ID) *MergedPropertyGroup {
	if d == nil || id == "" {
		return nil
	}
	for _, f := range d.Groups {
		if f.ParentID != nil && *f.ParentID == id {
			return f
		}
	}
	return nil
}

// PropertyID returns the group's original property ID, falling back to the
// parent ID.
func (d *MergedPropertyGroup) PropertyID() *ID {
	if d.OriginalID != nil {
		return d.OriginalID
	} else if d.ParentID != nil {
		return d.ParentID
	}
	return nil
}

// GroupByOriginal returns the nested group whose original ID matches, or nil.
func (d *MergedPropertyGroup) GroupByOriginal(id ID) *MergedPropertyGroup {
	if d == nil || id == "" {
		return nil
	}
	for _, f := range d.Groups {
		if f.OriginalID != nil && *f.OriginalID == id {
			return f
		}
	}
	return nil
}

// GroupByParent returns the nested group whose parent ID matches, or nil.
func (d *MergedPropertyGroup) GroupByParent(id ID) *MergedPropertyGroup {
	if d == nil || id == "" {
		return nil
	}
	for _, f := range d.Groups {
		if f.ParentID != nil && *f.ParentID == id {
			return f
		}
	}
	return nil
}

// Widget returns the scene widget matching both plugin and extension IDs,
// or nil if absent.
func (s *Scene) Widget(pluginID, extensionID ID) *SceneWidget {
	if s == nil {
		return nil
	}
	for _, w := range s.Widgets {
		if w.PluginID == pluginID && w.ExtensionID == extensionID {
			return w
		}
	}
	return nil
}

// Plugin returns the installed scene plugin with the given plugin ID, or nil.
func (s *Scene) Plugin(pluginID ID) *ScenePlugin {
	if s == nil {
		return nil
	}
	for _, p := range s.Plugins {
		if p.PluginID == pluginID {
			return p
		}
	}
	return nil
}
a/server/internal/adapter/gql/gqlmodel/models_gen.go b/server/internal/adapter/gql/gqlmodel/models_gen.go new file mode 100644 index 000000000..965599ea3 --- /dev/null +++ b/server/internal/adapter/gql/gqlmodel/models_gen.go @@ -0,0 +1,2119 @@ +// Code generated by github.com/99designs/gqlgen, DO NOT EDIT. + +package gqlmodel + +import ( + "fmt" + "io" + "net/url" + "strconv" + "time" + + "github.com/99designs/gqlgen/graphql" + "github.com/reearth/reearth-backend/internal/usecase" + "golang.org/x/text/language" +) + +type Layer interface { + IsLayer() +} + +type LayerTag interface { + IsLayerTag() +} + +type Node interface { + IsNode() +} + +type PropertyItem interface { + IsPropertyItem() +} + +type Tag interface { + IsTag() +} + +type AddClusterInput struct { + SceneID ID `json:"sceneId"` + Name string `json:"name"` +} + +type AddClusterPayload struct { + Scene *Scene `json:"scene"` + Cluster *Cluster `json:"cluster"` +} + +type AddDatasetSchemaInput struct { + SceneID ID `json:"sceneId"` + Name string `json:"name"` + Representativefield *ID `json:"representativefield"` +} + +type AddDatasetSchemaPayload struct { + DatasetSchema *DatasetSchema `json:"datasetSchema"` +} + +type AddDynamicDatasetInput struct { + DatasetSchemaID ID `json:"datasetSchemaId"` + Author string `json:"author"` + Content string `json:"content"` + Lat *float64 `json:"lat"` + Lng *float64 `json:"lng"` + Target *string `json:"target"` +} + +type AddDynamicDatasetPayload struct { + DatasetSchema *DatasetSchema `json:"datasetSchema"` + Dataset *Dataset `json:"dataset"` +} + +type AddDynamicDatasetSchemaInput struct { + SceneID ID `json:"sceneId"` +} + +type AddDynamicDatasetSchemaPayload struct { + DatasetSchema *DatasetSchema `json:"datasetSchema"` +} + +type AddInfoboxFieldInput struct { + LayerID ID `json:"layerId"` + PluginID ID `json:"pluginId"` + ExtensionID ID `json:"extensionId"` + Index *int `json:"index"` +} + +type AddInfoboxFieldPayload struct { + InfoboxField *InfoboxField 
`json:"infoboxField"` + Layer Layer `json:"layer"` +} + +type AddLayerGroupInput struct { + ParentLayerID ID `json:"parentLayerId"` + PluginID *ID `json:"pluginId"` + ExtensionID *ID `json:"extensionId"` + Index *int `json:"index"` + LinkedDatasetSchemaID *ID `json:"linkedDatasetSchemaID"` + Name *string `json:"name"` + RepresentativeFieldID *ID `json:"representativeFieldId"` +} + +type AddLayerGroupPayload struct { + Layer *LayerGroup `json:"layer"` + ParentLayer *LayerGroup `json:"parentLayer"` + Index *int `json:"index"` +} + +type AddLayerItemInput struct { + ParentLayerID ID `json:"parentLayerId"` + PluginID ID `json:"pluginId"` + ExtensionID ID `json:"extensionId"` + Index *int `json:"index"` + Name *string `json:"name"` + Lat *float64 `json:"lat"` + Lng *float64 `json:"lng"` +} + +type AddLayerItemPayload struct { + Layer *LayerItem `json:"layer"` + ParentLayer *LayerGroup `json:"parentLayer"` + Index *int `json:"index"` +} + +type AddMemberToTeamInput struct { + TeamID ID `json:"teamId"` + UserID ID `json:"userId"` + Role Role `json:"role"` +} + +type AddMemberToTeamPayload struct { + Team *Team `json:"team"` +} + +type AddPropertyItemInput struct { + PropertyID ID `json:"propertyId"` + SchemaGroupID ID `json:"schemaGroupId"` + Index *int `json:"index"` + NameFieldValue interface{} `json:"nameFieldValue"` + NameFieldType *ValueType `json:"nameFieldType"` +} + +type AddWidgetInput struct { + SceneID ID `json:"sceneId"` + PluginID ID `json:"pluginId"` + ExtensionID ID `json:"extensionId"` +} + +type AddWidgetPayload struct { + Scene *Scene `json:"scene"` + SceneWidget *SceneWidget `json:"sceneWidget"` +} + +type Asset struct { + ID ID `json:"id"` + CreatedAt time.Time `json:"createdAt"` + TeamID ID `json:"teamId"` + Name string `json:"name"` + Size int64 `json:"size"` + URL string `json:"url"` + ContentType string `json:"contentType"` + Team *Team `json:"team"` +} + +func (Asset) IsNode() {} + +type AssetConnection struct { + Edges []*AssetEdge `json:"edges"` 
+ Nodes []*Asset `json:"nodes"` + PageInfo *PageInfo `json:"pageInfo"` + TotalCount int `json:"totalCount"` +} + +type AssetEdge struct { + Cursor usecase.Cursor `json:"cursor"` + Node *Asset `json:"node"` +} + +type AttachTagItemToGroupInput struct { + ItemID ID `json:"itemID"` + GroupID ID `json:"groupID"` +} + +type AttachTagItemToGroupPayload struct { + Tag *TagGroup `json:"tag"` +} + +type AttachTagToLayerInput struct { + TagID ID `json:"tagID"` + LayerID ID `json:"layerID"` +} + +type AttachTagToLayerPayload struct { + Layer Layer `json:"layer"` +} + +type Camera struct { + Lat float64 `json:"lat"` + Lng float64 `json:"lng"` + Altitude float64 `json:"altitude"` + Heading float64 `json:"heading"` + Pitch float64 `json:"pitch"` + Roll float64 `json:"roll"` + Fov float64 `json:"fov"` +} + +type Cluster struct { + ID ID `json:"id"` + Name string `json:"name"` + PropertyID ID `json:"propertyId"` + Property *Property `json:"property"` +} + +type CreateAssetInput struct { + TeamID ID `json:"teamId"` + File graphql.Upload `json:"file"` +} + +type CreateAssetPayload struct { + Asset *Asset `json:"asset"` +} + +type CreateInfoboxInput struct { + LayerID ID `json:"layerId"` +} + +type CreateInfoboxPayload struct { + Layer Layer `json:"layer"` +} + +type CreateProjectInput struct { + TeamID ID `json:"teamId"` + Visualizer Visualizer `json:"visualizer"` + Name *string `json:"name"` + Description *string `json:"description"` + ImageURL *url.URL `json:"imageUrl"` + Alias *string `json:"alias"` + Archived *bool `json:"archived"` +} + +type CreateSceneInput struct { + ProjectID ID `json:"projectId"` +} + +type CreateScenePayload struct { + Scene *Scene `json:"scene"` +} + +type CreateTagGroupInput struct { + SceneID ID `json:"sceneId"` + Label string `json:"label"` + Tags []ID `json:"tags"` +} + +type CreateTagGroupPayload struct { + Tag *TagGroup `json:"tag"` +} + +type CreateTagItemInput struct { + SceneID ID `json:"sceneId"` + Label string `json:"label"` + Parent *ID 
`json:"parent"` + LinkedDatasetSchemaID *ID `json:"linkedDatasetSchemaID"` + LinkedDatasetID *ID `json:"linkedDatasetID"` + LinkedDatasetField *ID `json:"linkedDatasetField"` +} + +type CreateTagItemPayload struct { + Tag *TagItem `json:"tag"` + Parent *TagGroup `json:"parent"` +} + +type CreateTeamInput struct { + Name string `json:"name"` +} + +type CreateTeamPayload struct { + Team *Team `json:"team"` +} + +type Dataset struct { + ID ID `json:"id"` + Source string `json:"source"` + SchemaID ID `json:"schemaId"` + Fields []*DatasetField `json:"fields"` + Schema *DatasetSchema `json:"schema"` + Name *string `json:"name"` +} + +func (Dataset) IsNode() {} + +type DatasetConnection struct { + Edges []*DatasetEdge `json:"edges"` + Nodes []*Dataset `json:"nodes"` + PageInfo *PageInfo `json:"pageInfo"` + TotalCount int `json:"totalCount"` +} + +type DatasetEdge struct { + Cursor usecase.Cursor `json:"cursor"` + Node *Dataset `json:"node"` +} + +type DatasetField struct { + FieldID ID `json:"fieldId"` + SchemaID ID `json:"schemaId"` + Source string `json:"source"` + Type ValueType `json:"type"` + Value interface{} `json:"value"` + Schema *DatasetSchema `json:"schema"` + Field *DatasetSchemaField `json:"field"` + ValueRef *Dataset `json:"valueRef"` +} + +type DatasetSchema struct { + ID ID `json:"id"` + Source string `json:"source"` + Name string `json:"name"` + SceneID ID `json:"sceneId"` + Fields []*DatasetSchemaField `json:"fields"` + TotalCount int `json:"totalCount"` + RepresentativeFieldID *ID `json:"representativeFieldId"` + Dynamic *bool `json:"dynamic"` + Datasets *DatasetConnection `json:"datasets"` + Scene *Scene `json:"scene"` + RepresentativeField *DatasetSchemaField `json:"representativeField"` +} + +func (DatasetSchema) IsNode() {} + +type DatasetSchemaConnection struct { + Edges []*DatasetSchemaEdge `json:"edges"` + Nodes []*DatasetSchema `json:"nodes"` + PageInfo *PageInfo `json:"pageInfo"` + TotalCount int `json:"totalCount"` +} + +type DatasetSchemaEdge 
struct { + Cursor usecase.Cursor `json:"cursor"` + Node *DatasetSchema `json:"node"` +} + +type DatasetSchemaField struct { + ID ID `json:"id"` + Source string `json:"source"` + Name string `json:"name"` + Type ValueType `json:"type"` + SchemaID ID `json:"schemaId"` + RefID *ID `json:"refId"` + Schema *DatasetSchema `json:"schema"` + Ref *DatasetSchema `json:"ref"` +} + +func (DatasetSchemaField) IsNode() {} + +type DeleteMeInput struct { + UserID ID `json:"userId"` +} + +type DeleteMePayload struct { + UserID ID `json:"userId"` +} + +type DeleteProjectInput struct { + ProjectID ID `json:"projectId"` +} + +type DeleteProjectPayload struct { + ProjectID ID `json:"projectId"` +} + +type DeleteTeamInput struct { + TeamID ID `json:"teamId"` +} + +type DeleteTeamPayload struct { + TeamID ID `json:"teamId"` +} + +type DetachTagFromLayerInput struct { + TagID ID `json:"tagID"` + LayerID ID `json:"layerID"` +} + +type DetachTagFromLayerPayload struct { + Layer Layer `json:"layer"` +} + +type DetachTagItemFromGroupInput struct { + ItemID ID `json:"itemID"` + GroupID ID `json:"groupID"` +} + +type DetachTagItemFromGroupPayload struct { + Tag *TagGroup `json:"tag"` +} + +type ImportDatasetFromGoogleSheetInput struct { + AccessToken string `json:"accessToken"` + FileID string `json:"fileId"` + SheetName string `json:"sheetName"` + SceneID ID `json:"sceneId"` + DatasetSchemaID *ID `json:"datasetSchemaId"` +} + +type ImportDatasetInput struct { + File graphql.Upload `json:"file"` + SceneID ID `json:"sceneId"` + DatasetSchemaID *ID `json:"datasetSchemaId"` +} + +type ImportDatasetPayload struct { + DatasetSchema *DatasetSchema `json:"datasetSchema"` +} + +type ImportLayerInput struct { + LayerID ID `json:"layerId"` + File graphql.Upload `json:"file"` + Format LayerEncodingFormat `json:"format"` +} + +type ImportLayerPayload struct { + Layers []Layer `json:"layers"` + ParentLayer *LayerGroup `json:"parentLayer"` +} + +type Infobox struct { + SceneID ID `json:"sceneId"` + LayerID 
ID `json:"layerId"` + PropertyID ID `json:"propertyId"` + Fields []*InfoboxField `json:"fields"` + LinkedDatasetID *ID `json:"linkedDatasetId"` + Layer Layer `json:"layer"` + Property *Property `json:"property"` + LinkedDataset *Dataset `json:"linkedDataset"` + Merged *MergedInfobox `json:"merged"` + Scene *Scene `json:"scene"` +} + +type InfoboxField struct { + ID ID `json:"id"` + SceneID ID `json:"sceneId"` + LayerID ID `json:"layerId"` + PropertyID ID `json:"propertyId"` + PluginID ID `json:"pluginId"` + ExtensionID ID `json:"extensionId"` + LinkedDatasetID *ID `json:"linkedDatasetId"` + Layer Layer `json:"layer"` + Infobox *Infobox `json:"infobox"` + Property *Property `json:"property"` + Plugin *Plugin `json:"plugin"` + Extension *PluginExtension `json:"extension"` + LinkedDataset *Dataset `json:"linkedDataset"` + Merged *MergedInfoboxField `json:"merged"` + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` +} + +type InstallPluginInput struct { + SceneID ID `json:"sceneId"` + PluginID ID `json:"pluginId"` +} + +type InstallPluginPayload struct { + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` +} + +type LatLng struct { + Lat float64 `json:"lat"` + Lng float64 `json:"lng"` +} + +type LatLngHeight struct { + Lat float64 `json:"lat"` + Lng float64 `json:"lng"` + Height float64 `json:"height"` +} + +type LayerGroup struct { + ID ID `json:"id"` + SceneID ID `json:"sceneId"` + Name string `json:"name"` + IsVisible bool `json:"isVisible"` + PropertyID *ID `json:"propertyId"` + PluginID *ID `json:"pluginId"` + ExtensionID *ID `json:"extensionId"` + Infobox *Infobox `json:"infobox"` + ParentID *ID `json:"parentId"` + LinkedDatasetSchemaID *ID `json:"linkedDatasetSchemaId"` + Root bool `json:"root"` + LayerIds []ID `json:"layerIds"` + Tags []LayerTag `json:"tags"` + Parent *LayerGroup `json:"parent"` + Property *Property `json:"property"` + Plugin *Plugin `json:"plugin"` + Extension *PluginExtension 
`json:"extension"` + LinkedDatasetSchema *DatasetSchema `json:"linkedDatasetSchema"` + Layers []Layer `json:"layers"` + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` +} + +func (LayerGroup) IsLayer() {} + +type LayerItem struct { + ID ID `json:"id"` + SceneID ID `json:"sceneId"` + Name string `json:"name"` + IsVisible bool `json:"isVisible"` + PropertyID *ID `json:"propertyId"` + PluginID *ID `json:"pluginId"` + ExtensionID *ID `json:"extensionId"` + Infobox *Infobox `json:"infobox"` + ParentID *ID `json:"parentId"` + LinkedDatasetID *ID `json:"linkedDatasetId"` + Tags []LayerTag `json:"tags"` + Parent *LayerGroup `json:"parent"` + Property *Property `json:"property"` + Plugin *Plugin `json:"plugin"` + Extension *PluginExtension `json:"extension"` + LinkedDataset *Dataset `json:"linkedDataset"` + Merged *MergedLayer `json:"merged"` + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` +} + +func (LayerItem) IsLayer() {} + +type LayerTagGroup struct { + TagID ID `json:"tagId"` + Children []*LayerTagItem `json:"children"` + Tag Tag `json:"tag"` +} + +func (LayerTagGroup) IsLayerTag() {} + +type LayerTagItem struct { + TagID ID `json:"tagId"` + Tag Tag `json:"tag"` +} + +func (LayerTagItem) IsLayerTag() {} + +type LinkDatasetToPropertyValueInput struct { + PropertyID ID `json:"propertyId"` + SchemaGroupID *ID `json:"schemaGroupId"` + ItemID *ID `json:"itemId"` + FieldID ID `json:"fieldId"` + DatasetSchemaIds []ID `json:"datasetSchemaIds"` + DatasetSchemaFieldIds []ID `json:"datasetSchemaFieldIds"` + DatasetIds []ID `json:"datasetIds"` +} + +type Me struct { + ID ID `json:"id"` + Name string `json:"name"` + Email string `json:"email"` + Lang language.Tag `json:"lang"` + Theme Theme `json:"theme"` + MyTeamID ID `json:"myTeamId"` + Auths []string `json:"auths"` + Teams []*Team `json:"teams"` + MyTeam *Team `json:"myTeam"` +} + +type MergedInfobox struct { + SceneID ID `json:"sceneID"` + Property *MergedProperty 
`json:"property"` + Fields []*MergedInfoboxField `json:"fields"` + Scene *Scene `json:"scene"` +} + +type MergedInfoboxField struct { + OriginalID ID `json:"originalId"` + SceneID ID `json:"sceneID"` + PluginID ID `json:"pluginId"` + ExtensionID ID `json:"extensionId"` + Property *MergedProperty `json:"property"` + Plugin *Plugin `json:"plugin"` + Extension *PluginExtension `json:"extension"` + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` +} + +type MergedLayer struct { + OriginalID ID `json:"originalId"` + ParentID *ID `json:"parentId"` + SceneID ID `json:"sceneID"` + Property *MergedProperty `json:"property"` + Infobox *MergedInfobox `json:"infobox"` + Original *LayerItem `json:"original"` + Parent *LayerGroup `json:"parent"` + Scene *Scene `json:"scene"` +} + +type MergedProperty struct { + OriginalID *ID `json:"originalId"` + ParentID *ID `json:"parentId"` + SchemaID *ID `json:"schemaId"` + LinkedDatasetID *ID `json:"linkedDatasetId"` + Original *Property `json:"original"` + Parent *Property `json:"parent"` + Schema *PropertySchema `json:"schema"` + LinkedDataset *Dataset `json:"linkedDataset"` + Groups []*MergedPropertyGroup `json:"groups"` +} + +type MergedPropertyField struct { + SchemaID ID `json:"schemaId"` + FieldID ID `json:"fieldId"` + Value interface{} `json:"value"` + Type ValueType `json:"type"` + Links []*PropertyFieldLink `json:"links"` + Overridden bool `json:"overridden"` + Schema *PropertySchema `json:"schema"` + Field *PropertySchemaField `json:"field"` + ActualValue interface{} `json:"actualValue"` +} + +type MergedPropertyGroup struct { + OriginalPropertyID *ID `json:"originalPropertyId"` + ParentPropertyID *ID `json:"parentPropertyId"` + OriginalID *ID `json:"originalId"` + ParentID *ID `json:"parentId"` + SchemaGroupID ID `json:"schemaGroupId"` + SchemaID *ID `json:"schemaId"` + LinkedDatasetID *ID `json:"linkedDatasetId"` + Fields []*MergedPropertyField `json:"fields"` + Groups []*MergedPropertyGroup 
`json:"groups"` + OriginalProperty *Property `json:"originalProperty"` + ParentProperty *Property `json:"parentProperty"` + Original *PropertyGroup `json:"original"` + Parent *PropertyGroup `json:"parent"` + Schema *PropertySchema `json:"schema"` + LinkedDataset *Dataset `json:"linkedDataset"` +} + +type MoveInfoboxFieldInput struct { + LayerID ID `json:"layerId"` + InfoboxFieldID ID `json:"infoboxFieldId"` + Index int `json:"index"` +} + +type MoveInfoboxFieldPayload struct { + InfoboxFieldID ID `json:"infoboxFieldId"` + Layer Layer `json:"layer"` + Index int `json:"index"` +} + +type MoveLayerInput struct { + LayerID ID `json:"layerId"` + DestLayerID *ID `json:"destLayerId"` + Index *int `json:"index"` +} + +type MoveLayerPayload struct { + LayerID ID `json:"layerId"` + FromParentLayer *LayerGroup `json:"fromParentLayer"` + ToParentLayer *LayerGroup `json:"toParentLayer"` + Index int `json:"index"` +} + +type MovePropertyItemInput struct { + PropertyID ID `json:"propertyId"` + SchemaGroupID ID `json:"schemaGroupId"` + ItemID ID `json:"itemId"` + Index int `json:"index"` +} + +type PageInfo struct { + StartCursor *usecase.Cursor `json:"startCursor"` + EndCursor *usecase.Cursor `json:"endCursor"` + HasNextPage bool `json:"hasNextPage"` + HasPreviousPage bool `json:"hasPreviousPage"` +} + +type Pagination struct { + First *int `json:"first"` + Last *int `json:"last"` + After *usecase.Cursor `json:"after"` + Before *usecase.Cursor `json:"before"` +} + +type Plugin struct { + ID ID `json:"id"` + SceneID *ID `json:"sceneId"` + Name string `json:"name"` + Version string `json:"version"` + Description string `json:"description"` + Author string `json:"author"` + RepositoryURL string `json:"repositoryUrl"` + PropertySchemaID *ID `json:"propertySchemaId"` + Extensions []*PluginExtension `json:"extensions"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` + AllTranslatedDescription map[string]string `json:"allTranslatedDescription"` + AllTranslatedName map[string]string 
`json:"allTranslatedName"` + Scene *Scene `json:"scene"` + TranslatedName string `json:"translatedName"` + TranslatedDescription string `json:"translatedDescription"` + PropertySchema *PropertySchema `json:"propertySchema"` +} + +type PluginExtension struct { + ExtensionID ID `json:"extensionId"` + PluginID ID `json:"pluginId"` + Type PluginExtensionType `json:"type"` + Name string `json:"name"` + Description string `json:"description"` + Icon string `json:"icon"` + SingleOnly *bool `json:"singleOnly"` + WidgetLayout *WidgetLayout `json:"widgetLayout"` + Visualizer *Visualizer `json:"visualizer"` + PropertySchemaID ID `json:"propertySchemaId"` + AllTranslatedName map[string]string `json:"allTranslatedName"` + AllTranslatedDescription map[string]string `json:"allTranslatedDescription"` + Plugin *Plugin `json:"plugin"` + SceneWidget *SceneWidget `json:"sceneWidget"` + PropertySchema *PropertySchema `json:"propertySchema"` + TranslatedName string `json:"translatedName"` + TranslatedDescription string `json:"translatedDescription"` +} + +type Project struct { + ID ID `json:"id"` + IsArchived bool `json:"isArchived"` + IsBasicAuthActive bool `json:"isBasicAuthActive"` + BasicAuthUsername string `json:"basicAuthUsername"` + BasicAuthPassword string `json:"basicAuthPassword"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` + PublishedAt *time.Time `json:"publishedAt"` + Name string `json:"name"` + Description string `json:"description"` + Alias string `json:"alias"` + PublicTitle string `json:"publicTitle"` + PublicDescription string `json:"publicDescription"` + PublicImage string `json:"publicImage"` + PublicNoIndex bool `json:"publicNoIndex"` + ImageURL *url.URL `json:"imageUrl"` + TeamID ID `json:"teamId"` + Visualizer Visualizer `json:"visualizer"` + PublishmentStatus PublishmentStatus `json:"publishmentStatus"` + Team *Team `json:"team"` + Scene *Scene `json:"scene"` +} + +func (Project) IsNode() {} + +type ProjectAliasAvailability 
struct { + Alias string `json:"alias"` + Available bool `json:"available"` +} + +type ProjectConnection struct { + Edges []*ProjectEdge `json:"edges"` + Nodes []*Project `json:"nodes"` + PageInfo *PageInfo `json:"pageInfo"` + TotalCount int `json:"totalCount"` +} + +type ProjectEdge struct { + Cursor usecase.Cursor `json:"cursor"` + Node *Project `json:"node"` +} + +type ProjectPayload struct { + Project *Project `json:"project"` +} + +type Property struct { + ID ID `json:"id"` + SchemaID ID `json:"schemaId"` + Items []PropertyItem `json:"items"` + Schema *PropertySchema `json:"schema"` + Layer Layer `json:"layer"` + Merged *MergedProperty `json:"merged"` +} + +func (Property) IsNode() {} + +type PropertyCondition struct { + FieldID ID `json:"fieldId"` + Type ValueType `json:"type"` + Value interface{} `json:"value"` +} + +type PropertyField struct { + ID string `json:"id"` + ParentID ID `json:"parentId"` + SchemaID ID `json:"schemaId"` + FieldID ID `json:"fieldId"` + Links []*PropertyFieldLink `json:"links"` + Type ValueType `json:"type"` + Value interface{} `json:"value"` + Parent *Property `json:"parent"` + Schema *PropertySchema `json:"schema"` + Field *PropertySchemaField `json:"field"` + ActualValue interface{} `json:"actualValue"` +} + +type PropertyFieldLink struct { + DatasetID *ID `json:"datasetId"` + DatasetSchemaID ID `json:"datasetSchemaId"` + DatasetSchemaFieldID ID `json:"datasetSchemaFieldId"` + Dataset *Dataset `json:"dataset"` + DatasetField *DatasetField `json:"datasetField"` + DatasetSchema *DatasetSchema `json:"datasetSchema"` + DatasetSchemaField *DatasetSchemaField `json:"datasetSchemaField"` +} + +type PropertyFieldPayload struct { + Property *Property `json:"property"` + PropertyField *PropertyField `json:"propertyField"` +} + +type PropertyGroup struct { + ID ID `json:"id"` + SchemaID ID `json:"schemaId"` + SchemaGroupID ID `json:"schemaGroupId"` + Fields []*PropertyField `json:"fields"` + Schema *PropertySchema `json:"schema"` + 
SchemaGroup *PropertySchemaGroup `json:"schemaGroup"` +} + +func (PropertyGroup) IsPropertyItem() {} + +type PropertyGroupList struct { + ID ID `json:"id"` + SchemaID ID `json:"schemaId"` + SchemaGroupID ID `json:"schemaGroupId"` + Groups []*PropertyGroup `json:"groups"` + Schema *PropertySchema `json:"schema"` + SchemaGroup *PropertySchemaGroup `json:"schemaGroup"` +} + +func (PropertyGroupList) IsPropertyItem() {} + +type PropertyItemPayload struct { + Property *Property `json:"property"` + PropertyItem PropertyItem `json:"propertyItem"` +} + +type PropertyLinkableFields struct { + SchemaID ID `json:"schemaId"` + Latlng *ID `json:"latlng"` + URL *ID `json:"url"` + LatlngField *PropertySchemaField `json:"latlngField"` + URLField *PropertySchemaField `json:"urlField"` + Schema *PropertySchema `json:"schema"` +} + +type PropertySchema struct { + ID ID `json:"id"` + Groups []*PropertySchemaGroup `json:"groups"` + LinkableFields *PropertyLinkableFields `json:"linkableFields"` +} + +type PropertySchemaField struct { + FieldID ID `json:"fieldId"` + Type ValueType `json:"type"` + Title string `json:"title"` + Description string `json:"description"` + Prefix *string `json:"prefix"` + Suffix *string `json:"suffix"` + DefaultValue interface{} `json:"defaultValue"` + UI *PropertySchemaFieldUI `json:"ui"` + Min *float64 `json:"min"` + Max *float64 `json:"max"` + Choices []*PropertySchemaFieldChoice `json:"choices"` + IsAvailableIf *PropertyCondition `json:"isAvailableIf"` + AllTranslatedTitle map[string]string `json:"allTranslatedTitle"` + AllTranslatedDescription map[string]string `json:"allTranslatedDescription"` + TranslatedTitle string `json:"translatedTitle"` + TranslatedDescription string `json:"translatedDescription"` +} + +type PropertySchemaFieldChoice struct { + Key string `json:"key"` + Title string `json:"title"` + Icon *string `json:"icon"` + AllTranslatedTitle map[string]string `json:"allTranslatedTitle"` + TranslatedTitle string `json:"translatedTitle"` +} + 
+type PropertySchemaGroup struct { + SchemaGroupID ID `json:"schemaGroupId"` + SchemaID ID `json:"schemaId"` + Fields []*PropertySchemaField `json:"fields"` + IsList bool `json:"isList"` + IsAvailableIf *PropertyCondition `json:"isAvailableIf"` + Title *string `json:"title"` + AllTranslatedTitle map[string]string `json:"allTranslatedTitle"` + RepresentativeFieldID *ID `json:"representativeFieldId"` + RepresentativeField *PropertySchemaField `json:"representativeField"` + Schema *PropertySchema `json:"schema"` + TranslatedTitle string `json:"translatedTitle"` +} + +type PublishProjectInput struct { + ProjectID ID `json:"projectId"` + Alias *string `json:"alias"` + Status PublishmentStatus `json:"status"` +} + +type Rect struct { + West float64 `json:"west"` + South float64 `json:"south"` + East float64 `json:"east"` + North float64 `json:"north"` +} + +type RemoveAssetInput struct { + AssetID ID `json:"assetId"` +} + +type RemoveAssetPayload struct { + AssetID ID `json:"assetId"` +} + +type RemoveClusterInput struct { + ClusterID ID `json:"clusterId"` + SceneID ID `json:"sceneId"` +} + +type RemoveClusterPayload struct { + Scene *Scene `json:"scene"` + ClusterID ID `json:"clusterId"` +} + +type RemoveDatasetSchemaInput struct { + SchemaID ID `json:"schemaId"` + Force *bool `json:"force"` +} + +type RemoveDatasetSchemaPayload struct { + SchemaID ID `json:"schemaId"` +} + +type RemoveInfoboxFieldInput struct { + LayerID ID `json:"layerId"` + InfoboxFieldID ID `json:"infoboxFieldId"` +} + +type RemoveInfoboxFieldPayload struct { + InfoboxFieldID ID `json:"infoboxFieldId"` + Layer Layer `json:"layer"` +} + +type RemoveInfoboxInput struct { + LayerID ID `json:"layerId"` +} + +type RemoveInfoboxPayload struct { + Layer Layer `json:"layer"` +} + +type RemoveLayerInput struct { + LayerID ID `json:"layerId"` +} + +type RemoveLayerPayload struct { + LayerID ID `json:"layerId"` + ParentLayer *LayerGroup `json:"parentLayer"` +} + +type RemoveMemberFromTeamInput struct { + 
TeamID ID `json:"teamId"` + UserID ID `json:"userId"` +} + +type RemoveMemberFromTeamPayload struct { + Team *Team `json:"team"` +} + +type RemoveMyAuthInput struct { + Auth string `json:"auth"` +} + +type RemovePropertyFieldInput struct { + PropertyID ID `json:"propertyId"` + SchemaGroupID *ID `json:"schemaGroupId"` + ItemID *ID `json:"itemId"` + FieldID ID `json:"fieldId"` +} + +type RemovePropertyItemInput struct { + PropertyID ID `json:"propertyId"` + SchemaGroupID ID `json:"schemaGroupId"` + ItemID ID `json:"itemId"` +} + +type RemoveTagInput struct { + TagID ID `json:"tagID"` +} + +type RemoveTagPayload struct { + TagID ID `json:"tagId"` + UpdatedLayers []Layer `json:"updatedLayers"` +} + +type RemoveWidgetInput struct { + SceneID ID `json:"sceneId"` + WidgetID ID `json:"widgetId"` +} + +type RemoveWidgetPayload struct { + Scene *Scene `json:"scene"` + WidgetID ID `json:"widgetId"` +} + +type Scene struct { + ID ID `json:"id"` + ProjectID ID `json:"projectId"` + TeamID ID `json:"teamId"` + PropertyID ID `json:"propertyId"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` + RootLayerID ID `json:"rootLayerId"` + Widgets []*SceneWidget `json:"widgets"` + Plugins []*ScenePlugin `json:"plugins"` + WidgetAlignSystem *WidgetAlignSystem `json:"widgetAlignSystem"` + DynamicDatasetSchemas []*DatasetSchema `json:"dynamicDatasetSchemas"` + Project *Project `json:"project"` + Team *Team `json:"team"` + Property *Property `json:"property"` + RootLayer *LayerGroup `json:"rootLayer"` + DatasetSchemas *DatasetSchemaConnection `json:"datasetSchemas"` + TagIds []ID `json:"tagIds"` + Tags []Tag `json:"tags"` + Clusters []*Cluster `json:"clusters"` +} + +func (Scene) IsNode() {} + +type ScenePlugin struct { + PluginID ID `json:"pluginId"` + PropertyID *ID `json:"propertyId"` + Plugin *Plugin `json:"plugin"` + Property *Property `json:"property"` +} + +type SceneWidget struct { + ID ID `json:"id"` + PluginID ID `json:"pluginId"` + ExtensionID ID 
`json:"extensionId"` + PropertyID ID `json:"propertyId"` + Enabled bool `json:"enabled"` + Extended bool `json:"extended"` + Plugin *Plugin `json:"plugin"` + Extension *PluginExtension `json:"extension"` + Property *Property `json:"property"` +} + +type SignupInput struct { + Lang *language.Tag `json:"lang"` + Theme *Theme `json:"theme"` + UserID *ID `json:"userId"` + TeamID *ID `json:"teamId"` + Secret *string `json:"secret"` +} + +type SignupPayload struct { + User *User `json:"user"` + Team *Team `json:"team"` +} + +type SyncDatasetInput struct { + SceneID ID `json:"sceneId"` + URL string `json:"url"` +} + +type SyncDatasetPayload struct { + SceneID ID `json:"sceneId"` + URL string `json:"url"` + DatasetSchema []*DatasetSchema `json:"datasetSchema"` + Dataset []*Dataset `json:"dataset"` +} + +type TagGroup struct { + ID ID `json:"id"` + SceneID ID `json:"sceneId"` + Label string `json:"label"` + TagIds []ID `json:"tagIds"` + Tags []*TagItem `json:"tags"` + Scene *Scene `json:"scene"` + Layers []Layer `json:"layers"` +} + +func (TagGroup) IsTag() {} + +type TagItem struct { + ID ID `json:"id"` + SceneID ID `json:"sceneId"` + Label string `json:"label"` + ParentID *ID `json:"parentId"` + LinkedDatasetID *ID `json:"linkedDatasetID"` + LinkedDatasetSchemaID *ID `json:"linkedDatasetSchemaID"` + LinkedDatasetFieldID *ID `json:"linkedDatasetFieldID"` + LinkedDatasetSchema *DatasetSchema `json:"linkedDatasetSchema"` + LinkedDataset *Dataset `json:"linkedDataset"` + LinkedDatasetField *DatasetField `json:"linkedDatasetField"` + Parent *TagGroup `json:"parent"` + Layers []Layer `json:"layers"` +} + +func (TagItem) IsTag() {} + +type Team struct { + ID ID `json:"id"` + Name string `json:"name"` + Members []*TeamMember `json:"members"` + Personal bool `json:"personal"` + Assets *AssetConnection `json:"assets"` + Projects *ProjectConnection `json:"projects"` +} + +func (Team) IsNode() {} + +type TeamMember struct { + UserID ID `json:"userId"` + Role Role `json:"role"` + User 
*User `json:"user"` +} + +type Typography struct { + FontFamily *string `json:"fontFamily"` + FontWeight *string `json:"fontWeight"` + FontSize *int `json:"fontSize"` + Color *string `json:"color"` + TextAlign *TextAlign `json:"textAlign"` + Bold *bool `json:"bold"` + Italic *bool `json:"italic"` + Underline *bool `json:"underline"` +} + +type UninstallPluginInput struct { + SceneID ID `json:"sceneId"` + PluginID ID `json:"pluginId"` +} + +type UninstallPluginPayload struct { + PluginID ID `json:"pluginId"` + Scene *Scene `json:"scene"` +} + +type UnlinkPropertyValueInput struct { + PropertyID ID `json:"propertyId"` + SchemaGroupID *ID `json:"schemaGroupId"` + ItemID *ID `json:"itemId"` + FieldID ID `json:"fieldId"` +} + +type UpdateClusterInput struct { + ClusterID ID `json:"clusterId"` + SceneID ID `json:"sceneId"` + Name *string `json:"name"` + PropertyID *ID `json:"propertyId"` +} + +type UpdateClusterPayload struct { + Scene *Scene `json:"scene"` + Cluster *Cluster `json:"cluster"` +} + +type UpdateDatasetSchemaInput struct { + SchemaID ID `json:"schemaId"` + Name string `json:"name"` +} + +type UpdateDatasetSchemaPayload struct { + DatasetSchema *DatasetSchema `json:"datasetSchema"` +} + +type UpdateLayerInput struct { + LayerID ID `json:"layerId"` + Name *string `json:"name"` + Visible *bool `json:"visible"` +} + +type UpdateLayerPayload struct { + Layer Layer `json:"layer"` +} + +type UpdateMeInput struct { + Name *string `json:"name"` + Email *string `json:"email"` + Lang *language.Tag `json:"lang"` + Theme *Theme `json:"theme"` + Password *string `json:"password"` + PasswordConfirmation *string `json:"passwordConfirmation"` +} + +type UpdateMePayload struct { + Me *Me `json:"me"` +} + +type UpdateMemberOfTeamInput struct { + TeamID ID `json:"teamId"` + UserID ID `json:"userId"` + Role Role `json:"role"` +} + +type UpdateMemberOfTeamPayload struct { + Team *Team `json:"team"` +} + +type UpdateProjectInput struct { + ProjectID ID `json:"projectId"` + Name 
*string `json:"name"` + Description *string `json:"description"` + Archived *bool `json:"archived"` + IsBasicAuthActive *bool `json:"isBasicAuthActive"` + BasicAuthUsername *string `json:"basicAuthUsername"` + BasicAuthPassword *string `json:"basicAuthPassword"` + Alias *string `json:"alias"` + ImageURL *url.URL `json:"imageUrl"` + PublicTitle *string `json:"publicTitle"` + PublicDescription *string `json:"publicDescription"` + PublicImage *string `json:"publicImage"` + PublicNoIndex *bool `json:"publicNoIndex"` + DeleteImageURL *bool `json:"deleteImageUrl"` + DeletePublicImage *bool `json:"deletePublicImage"` +} + +type UpdatePropertyItemInput struct { + PropertyID ID `json:"propertyId"` + SchemaGroupID ID `json:"schemaGroupId"` + Operations []*UpdatePropertyItemOperationInput `json:"operations"` +} + +type UpdatePropertyItemOperationInput struct { + Operation ListOperation `json:"operation"` + ItemID *ID `json:"itemId"` + Index *int `json:"index"` + NameFieldValue interface{} `json:"nameFieldValue"` + NameFieldType *ValueType `json:"nameFieldType"` +} + +type UpdatePropertyValueInput struct { + PropertyID ID `json:"propertyId"` + SchemaGroupID *ID `json:"schemaGroupId"` + ItemID *ID `json:"itemId"` + FieldID ID `json:"fieldId"` + Value interface{} `json:"value"` + Type ValueType `json:"type"` +} + +type UpdateTagInput struct { + TagID ID `json:"tagId"` + SceneID ID `json:"sceneId"` + Label *string `json:"label"` +} + +type UpdateTagPayload struct { + Tag Tag `json:"tag"` +} + +type UpdateTeamInput struct { + TeamID ID `json:"teamId"` + Name string `json:"name"` +} + +type UpdateTeamPayload struct { + Team *Team `json:"team"` +} + +type UpdateWidgetAlignSystemInput struct { + SceneID ID `json:"sceneId"` + Location *WidgetLocationInput `json:"location"` + Align *WidgetAreaAlign `json:"align"` +} + +type UpdateWidgetAlignSystemPayload struct { + Scene *Scene `json:"scene"` +} + +type UpdateWidgetInput struct { + SceneID ID `json:"sceneId"` + WidgetID ID 
`json:"widgetId"` + Enabled *bool `json:"enabled"` + Location *WidgetLocationInput `json:"location"` + Extended *bool `json:"extended"` + Index *int `json:"index"` +} + +type UpdateWidgetPayload struct { + Scene *Scene `json:"scene"` + SceneWidget *SceneWidget `json:"sceneWidget"` +} + +type UpgradePluginInput struct { + SceneID ID `json:"sceneId"` + PluginID ID `json:"pluginId"` + ToPluginID ID `json:"toPluginId"` +} + +type UpgradePluginPayload struct { + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` +} + +type UploadFileToPropertyInput struct { + PropertyID ID `json:"propertyId"` + SchemaGroupID *ID `json:"schemaGroupId"` + ItemID *ID `json:"itemId"` + FieldID ID `json:"fieldId"` + File graphql.Upload `json:"file"` +} + +type UploadPluginInput struct { + SceneID ID `json:"sceneId"` + File *graphql.Upload `json:"file"` + URL *url.URL `json:"url"` +} + +type UploadPluginPayload struct { + Plugin *Plugin `json:"plugin"` + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` +} + +type User struct { + ID ID `json:"id"` + Name string `json:"name"` + Email string `json:"email"` +} + +func (User) IsNode() {} + +type WidgetAlignSystem struct { + Inner *WidgetZone `json:"inner"` + Outer *WidgetZone `json:"outer"` +} + +type WidgetArea struct { + WidgetIds []ID `json:"widgetIds"` + Align WidgetAreaAlign `json:"align"` +} + +type WidgetExtendable struct { + Vertically bool `json:"vertically"` + Horizontally bool `json:"horizontally"` +} + +type WidgetLayout struct { + Extendable *WidgetExtendable `json:"extendable"` + Extended bool `json:"extended"` + Floating bool `json:"floating"` + DefaultLocation *WidgetLocation `json:"defaultLocation"` +} + +type WidgetLocation struct { + Zone WidgetZoneType `json:"zone"` + Section WidgetSectionType `json:"section"` + Area WidgetAreaType `json:"area"` +} + +type WidgetLocationInput struct { + Zone WidgetZoneType `json:"zone"` + Section WidgetSectionType `json:"section"` + Area 
WidgetAreaType `json:"area"` +} + +type WidgetSection struct { + Top *WidgetArea `json:"top"` + Middle *WidgetArea `json:"middle"` + Bottom *WidgetArea `json:"bottom"` +} + +type WidgetZone struct { + Left *WidgetSection `json:"left"` + Center *WidgetSection `json:"center"` + Right *WidgetSection `json:"right"` +} + +type AssetSortType string + +const ( + AssetSortTypeDate AssetSortType = "DATE" + AssetSortTypeSize AssetSortType = "SIZE" + AssetSortTypeName AssetSortType = "NAME" +) + +var AllAssetSortType = []AssetSortType{ + AssetSortTypeDate, + AssetSortTypeSize, + AssetSortTypeName, +} + +func (e AssetSortType) IsValid() bool { + switch e { + case AssetSortTypeDate, AssetSortTypeSize, AssetSortTypeName: + return true + } + return false +} + +func (e AssetSortType) String() string { + return string(e) +} + +func (e *AssetSortType) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = AssetSortType(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid AssetSortType", str) + } + return nil +} + +func (e AssetSortType) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type LayerEncodingFormat string + +const ( + LayerEncodingFormatKml LayerEncodingFormat = "KML" + LayerEncodingFormatCzml LayerEncodingFormat = "CZML" + LayerEncodingFormatGeojson LayerEncodingFormat = "GEOJSON" + LayerEncodingFormatShape LayerEncodingFormat = "SHAPE" + LayerEncodingFormatReearth LayerEncodingFormat = "REEARTH" +) + +var AllLayerEncodingFormat = []LayerEncodingFormat{ + LayerEncodingFormatKml, + LayerEncodingFormatCzml, + LayerEncodingFormatGeojson, + LayerEncodingFormatShape, + LayerEncodingFormatReearth, +} + +func (e LayerEncodingFormat) IsValid() bool { + switch e { + case LayerEncodingFormatKml, LayerEncodingFormatCzml, LayerEncodingFormatGeojson, LayerEncodingFormatShape, LayerEncodingFormatReearth: + return true + } + return false +} + +func (e LayerEncodingFormat) 
String() string { + return string(e) +} + +func (e *LayerEncodingFormat) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = LayerEncodingFormat(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid LayerEncodingFormat", str) + } + return nil +} + +func (e LayerEncodingFormat) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type ListOperation string + +const ( + ListOperationAdd ListOperation = "ADD" + ListOperationMove ListOperation = "MOVE" + ListOperationRemove ListOperation = "REMOVE" +) + +var AllListOperation = []ListOperation{ + ListOperationAdd, + ListOperationMove, + ListOperationRemove, +} + +func (e ListOperation) IsValid() bool { + switch e { + case ListOperationAdd, ListOperationMove, ListOperationRemove: + return true + } + return false +} + +func (e ListOperation) String() string { + return string(e) +} + +func (e *ListOperation) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = ListOperation(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid ListOperation", str) + } + return nil +} + +func (e ListOperation) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type NodeType string + +const ( + NodeTypeAsset NodeType = "ASSET" + NodeTypeUser NodeType = "USER" + NodeTypeTeam NodeType = "TEAM" + NodeTypeProject NodeType = "PROJECT" + NodeTypePlugin NodeType = "PLUGIN" + NodeTypeScene NodeType = "SCENE" + NodeTypePropertySchema NodeType = "PROPERTY_SCHEMA" + NodeTypeProperty NodeType = "PROPERTY" + NodeTypeDatasetSchema NodeType = "DATASET_SCHEMA" + NodeTypeDataset NodeType = "DATASET" + NodeTypeLayerGroup NodeType = "LAYER_GROUP" + NodeTypeLayerItem NodeType = "LAYER_ITEM" +) + +var AllNodeType = []NodeType{ + NodeTypeAsset, + NodeTypeUser, + NodeTypeTeam, + NodeTypeProject, + NodeTypePlugin, + NodeTypeScene, + 
NodeTypePropertySchema, + NodeTypeProperty, + NodeTypeDatasetSchema, + NodeTypeDataset, + NodeTypeLayerGroup, + NodeTypeLayerItem, +} + +func (e NodeType) IsValid() bool { + switch e { + case NodeTypeAsset, NodeTypeUser, NodeTypeTeam, NodeTypeProject, NodeTypePlugin, NodeTypeScene, NodeTypePropertySchema, NodeTypeProperty, NodeTypeDatasetSchema, NodeTypeDataset, NodeTypeLayerGroup, NodeTypeLayerItem: + return true + } + return false +} + +func (e NodeType) String() string { + return string(e) +} + +func (e *NodeType) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = NodeType(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid NodeType", str) + } + return nil +} + +func (e NodeType) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type PluginExtensionType string + +const ( + PluginExtensionTypePrimitive PluginExtensionType = "PRIMITIVE" + PluginExtensionTypeWidget PluginExtensionType = "WIDGET" + PluginExtensionTypeBlock PluginExtensionType = "BLOCK" + PluginExtensionTypeVisualizer PluginExtensionType = "VISUALIZER" + PluginExtensionTypeInfobox PluginExtensionType = "INFOBOX" +) + +var AllPluginExtensionType = []PluginExtensionType{ + PluginExtensionTypePrimitive, + PluginExtensionTypeWidget, + PluginExtensionTypeBlock, + PluginExtensionTypeVisualizer, + PluginExtensionTypeInfobox, +} + +func (e PluginExtensionType) IsValid() bool { + switch e { + case PluginExtensionTypePrimitive, PluginExtensionTypeWidget, PluginExtensionTypeBlock, PluginExtensionTypeVisualizer, PluginExtensionTypeInfobox: + return true + } + return false +} + +func (e PluginExtensionType) String() string { + return string(e) +} + +func (e *PluginExtensionType) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = PluginExtensionType(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid 
PluginExtensionType", str) + } + return nil +} + +func (e PluginExtensionType) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type PropertySchemaFieldUI string + +const ( + PropertySchemaFieldUILayer PropertySchemaFieldUI = "LAYER" + PropertySchemaFieldUIMultiline PropertySchemaFieldUI = "MULTILINE" + PropertySchemaFieldUISelection PropertySchemaFieldUI = "SELECTION" + PropertySchemaFieldUIColor PropertySchemaFieldUI = "COLOR" + PropertySchemaFieldUIRange PropertySchemaFieldUI = "RANGE" + PropertySchemaFieldUISlider PropertySchemaFieldUI = "SLIDER" + PropertySchemaFieldUIImage PropertySchemaFieldUI = "IMAGE" + PropertySchemaFieldUIVideo PropertySchemaFieldUI = "VIDEO" + PropertySchemaFieldUIFile PropertySchemaFieldUI = "FILE" + PropertySchemaFieldUICameraPose PropertySchemaFieldUI = "CAMERA_POSE" + PropertySchemaFieldUIDatetime PropertySchemaFieldUI = "DATETIME" +) + +var AllPropertySchemaFieldUI = []PropertySchemaFieldUI{ + PropertySchemaFieldUILayer, + PropertySchemaFieldUIMultiline, + PropertySchemaFieldUISelection, + PropertySchemaFieldUIColor, + PropertySchemaFieldUIRange, + PropertySchemaFieldUISlider, + PropertySchemaFieldUIImage, + PropertySchemaFieldUIVideo, + PropertySchemaFieldUIFile, + PropertySchemaFieldUICameraPose, + PropertySchemaFieldUIDatetime, +} + +func (e PropertySchemaFieldUI) IsValid() bool { + switch e { + case PropertySchemaFieldUILayer, PropertySchemaFieldUIMultiline, PropertySchemaFieldUISelection, PropertySchemaFieldUIColor, PropertySchemaFieldUIRange, PropertySchemaFieldUISlider, PropertySchemaFieldUIImage, PropertySchemaFieldUIVideo, PropertySchemaFieldUIFile, PropertySchemaFieldUICameraPose, PropertySchemaFieldUIDatetime: + return true + } + return false +} + +func (e PropertySchemaFieldUI) String() string { + return string(e) +} + +func (e *PropertySchemaFieldUI) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = 
PropertySchemaFieldUI(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid PropertySchemaFieldUI", str) + } + return nil +} + +func (e PropertySchemaFieldUI) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type PublishmentStatus string + +const ( + PublishmentStatusPublic PublishmentStatus = "PUBLIC" + PublishmentStatusLimited PublishmentStatus = "LIMITED" + PublishmentStatusPrivate PublishmentStatus = "PRIVATE" +) + +var AllPublishmentStatus = []PublishmentStatus{ + PublishmentStatusPublic, + PublishmentStatusLimited, + PublishmentStatusPrivate, +} + +func (e PublishmentStatus) IsValid() bool { + switch e { + case PublishmentStatusPublic, PublishmentStatusLimited, PublishmentStatusPrivate: + return true + } + return false +} + +func (e PublishmentStatus) String() string { + return string(e) +} + +func (e *PublishmentStatus) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = PublishmentStatus(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid PublishmentStatus", str) + } + return nil +} + +func (e PublishmentStatus) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type Role string + +const ( + RoleReader Role = "READER" + RoleWriter Role = "WRITER" + RoleOwner Role = "OWNER" +) + +var AllRole = []Role{ + RoleReader, + RoleWriter, + RoleOwner, +} + +func (e Role) IsValid() bool { + switch e { + case RoleReader, RoleWriter, RoleOwner: + return true + } + return false +} + +func (e Role) String() string { + return string(e) +} + +func (e *Role) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = Role(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid Role", str) + } + return nil +} + +func (e Role) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type TextAlign string + +const ( + TextAlignLeft 
TextAlign = "LEFT" + TextAlignCenter TextAlign = "CENTER" + TextAlignRight TextAlign = "RIGHT" + TextAlignJustify TextAlign = "JUSTIFY" + TextAlignJustifyAll TextAlign = "JUSTIFY_ALL" +) + +var AllTextAlign = []TextAlign{ + TextAlignLeft, + TextAlignCenter, + TextAlignRight, + TextAlignJustify, + TextAlignJustifyAll, +} + +func (e TextAlign) IsValid() bool { + switch e { + case TextAlignLeft, TextAlignCenter, TextAlignRight, TextAlignJustify, TextAlignJustifyAll: + return true + } + return false +} + +func (e TextAlign) String() string { + return string(e) +} + +func (e *TextAlign) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = TextAlign(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid TextAlign", str) + } + return nil +} + +func (e TextAlign) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type Theme string + +const ( + ThemeDefault Theme = "DEFAULT" + ThemeLight Theme = "LIGHT" + ThemeDark Theme = "DARK" +) + +var AllTheme = []Theme{ + ThemeDefault, + ThemeLight, + ThemeDark, +} + +func (e Theme) IsValid() bool { + switch e { + case ThemeDefault, ThemeLight, ThemeDark: + return true + } + return false +} + +func (e Theme) String() string { + return string(e) +} + +func (e *Theme) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = Theme(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid Theme", str) + } + return nil +} + +func (e Theme) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type ValueType string + +const ( + ValueTypeBool ValueType = "BOOL" + ValueTypeNumber ValueType = "NUMBER" + ValueTypeString ValueType = "STRING" + ValueTypeRef ValueType = "REF" + ValueTypeURL ValueType = "URL" + ValueTypeLatlng ValueType = "LATLNG" + ValueTypeLatlngheight ValueType = "LATLNGHEIGHT" + ValueTypeCamera ValueType = "CAMERA" + 
ValueTypeTypography ValueType = "TYPOGRAPHY" + ValueTypeCoordinates ValueType = "COORDINATES" + ValueTypePolygon ValueType = "POLYGON" + ValueTypeRect ValueType = "RECT" +) + +var AllValueType = []ValueType{ + ValueTypeBool, + ValueTypeNumber, + ValueTypeString, + ValueTypeRef, + ValueTypeURL, + ValueTypeLatlng, + ValueTypeLatlngheight, + ValueTypeCamera, + ValueTypeTypography, + ValueTypeCoordinates, + ValueTypePolygon, + ValueTypeRect, +} + +func (e ValueType) IsValid() bool { + switch e { + case ValueTypeBool, ValueTypeNumber, ValueTypeString, ValueTypeRef, ValueTypeURL, ValueTypeLatlng, ValueTypeLatlngheight, ValueTypeCamera, ValueTypeTypography, ValueTypeCoordinates, ValueTypePolygon, ValueTypeRect: + return true + } + return false +} + +func (e ValueType) String() string { + return string(e) +} + +func (e *ValueType) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = ValueType(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid ValueType", str) + } + return nil +} + +func (e ValueType) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type Visualizer string + +const ( + VisualizerCesium Visualizer = "CESIUM" +) + +var AllVisualizer = []Visualizer{ + VisualizerCesium, +} + +func (e Visualizer) IsValid() bool { + switch e { + case VisualizerCesium: + return true + } + return false +} + +func (e Visualizer) String() string { + return string(e) +} + +func (e *Visualizer) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = Visualizer(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid Visualizer", str) + } + return nil +} + +func (e Visualizer) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type WidgetAreaAlign string + +const ( + WidgetAreaAlignStart WidgetAreaAlign = "START" + WidgetAreaAlignCentered WidgetAreaAlign = "CENTERED" + 
WidgetAreaAlignEnd WidgetAreaAlign = "END" +) + +var AllWidgetAreaAlign = []WidgetAreaAlign{ + WidgetAreaAlignStart, + WidgetAreaAlignCentered, + WidgetAreaAlignEnd, +} + +func (e WidgetAreaAlign) IsValid() bool { + switch e { + case WidgetAreaAlignStart, WidgetAreaAlignCentered, WidgetAreaAlignEnd: + return true + } + return false +} + +func (e WidgetAreaAlign) String() string { + return string(e) +} + +func (e *WidgetAreaAlign) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = WidgetAreaAlign(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid WidgetAreaAlign", str) + } + return nil +} + +func (e WidgetAreaAlign) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type WidgetAreaType string + +const ( + WidgetAreaTypeTop WidgetAreaType = "TOP" + WidgetAreaTypeMiddle WidgetAreaType = "MIDDLE" + WidgetAreaTypeBottom WidgetAreaType = "BOTTOM" +) + +var AllWidgetAreaType = []WidgetAreaType{ + WidgetAreaTypeTop, + WidgetAreaTypeMiddle, + WidgetAreaTypeBottom, +} + +func (e WidgetAreaType) IsValid() bool { + switch e { + case WidgetAreaTypeTop, WidgetAreaTypeMiddle, WidgetAreaTypeBottom: + return true + } + return false +} + +func (e WidgetAreaType) String() string { + return string(e) +} + +func (e *WidgetAreaType) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = WidgetAreaType(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid WidgetAreaType", str) + } + return nil +} + +func (e WidgetAreaType) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type WidgetSectionType string + +const ( + WidgetSectionTypeLeft WidgetSectionType = "LEFT" + WidgetSectionTypeCenter WidgetSectionType = "CENTER" + WidgetSectionTypeRight WidgetSectionType = "RIGHT" +) + +var AllWidgetSectionType = []WidgetSectionType{ + WidgetSectionTypeLeft, + 
WidgetSectionTypeCenter, + WidgetSectionTypeRight, +} + +func (e WidgetSectionType) IsValid() bool { + switch e { + case WidgetSectionTypeLeft, WidgetSectionTypeCenter, WidgetSectionTypeRight: + return true + } + return false +} + +func (e WidgetSectionType) String() string { + return string(e) +} + +func (e *WidgetSectionType) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = WidgetSectionType(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid WidgetSectionType", str) + } + return nil +} + +func (e WidgetSectionType) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type WidgetZoneType string + +const ( + WidgetZoneTypeInner WidgetZoneType = "INNER" + WidgetZoneTypeOuter WidgetZoneType = "OUTER" +) + +var AllWidgetZoneType = []WidgetZoneType{ + WidgetZoneTypeInner, + WidgetZoneTypeOuter, +} + +func (e WidgetZoneType) IsValid() bool { + switch e { + case WidgetZoneTypeInner, WidgetZoneTypeOuter: + return true + } + return false +} + +func (e WidgetZoneType) String() string { + return string(e) +} + +func (e *WidgetZoneType) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = WidgetZoneType(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid WidgetZoneType", str) + } + return nil +} + +func (e WidgetZoneType) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} diff --git a/server/internal/adapter/gql/gqlmodel/scalar.go b/server/internal/adapter/gql/gqlmodel/scalar.go new file mode 100644 index 000000000..7968d1e22 --- /dev/null +++ b/server/internal/adapter/gql/gqlmodel/scalar.go @@ -0,0 +1,77 @@ +package gqlmodel + +import ( + "encoding/json" + "errors" + "fmt" + "io" + "net/url" + "strconv" + + "github.com/99designs/gqlgen/graphql" + "github.com/reearth/reearth-backend/internal/usecase" + "golang.org/x/text/language" +) + +func MarshalURL(t 
url.URL) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalURL(v interface{}) (url.URL, error) { + if tmpStr, ok := v.(string); ok { + u, err := url.Parse(tmpStr) + if u != nil { + return *u, err + } + return url.URL{}, err + } + return url.URL{}, errors.New("invalid URL") +} + +func MarshalLang(t language.Tag) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalLang(v interface{}) (language.Tag, error) { + if tmpStr, ok := v.(string); ok { + if tmpStr == "" { + return language.Tag{}, nil + } + l, err := language.Parse(tmpStr) + if err != nil { + return language.Tag{}, err + } + return l, nil + } + return language.Tag{}, errors.New("invalid lang") +} + +func MarshalCursor(t usecase.Cursor) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(string(t))) + }) +} + +func UnmarshalCursor(v interface{}) (usecase.Cursor, error) { + if tmpStr, ok := v.(string); ok { + return usecase.Cursor(tmpStr), nil + } + return usecase.Cursor(""), errors.New("invalid cursor") +} + +func MarshalMap(val map[string]string) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { + _ = json.NewEncoder(w).Encode(val) + }) +} + +func UnmarshalMap(v interface{}) (map[string]string, error) { + if m, ok := v.(map[string]string); ok { + return m, nil + } + return nil, fmt.Errorf("%T is not a map", v) +} diff --git a/server/internal/adapter/gql/gqlmodel/scalar_id.go b/server/internal/adapter/gql/gqlmodel/scalar_id.go new file mode 100644 index 000000000..80cf8d422 --- /dev/null +++ b/server/internal/adapter/gql/gqlmodel/scalar_id.go @@ -0,0 +1,174 @@ +package gqlmodel + +import ( + "errors" + "io" + "strconv" + + "github.com/99designs/gqlgen/graphql" + "github.com/reearth/reearth-backend/pkg/id" + 
"github.com/reearth/reearth-backend/pkg/id/idx" +) + +type ID string + +func MarshalPropertyFieldID(t id.PropertyFieldID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalPropertyFieldID(v interface{}) (id.PropertyFieldID, error) { + if tmpStr, ok := v.(string); ok { + return id.PropertyFieldID(tmpStr), nil + } + return id.PropertyFieldID(""), errors.New("invalid ID") +} + +func MarshalDatasetFieldID(t id.DatasetFieldID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalDatasetFieldID(v interface{}) (id.DatasetFieldID, error) { + if tmpStr, ok := v.(string); ok { + return id.DatasetFieldIDFrom(tmpStr) + } + return id.NewDatasetFieldID(), errors.New("invalid ID") +} + +func MarshalPluginID(t id.PluginID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalPluginID(v interface{}) (id.PluginID, error) { + if tmpStr, ok := v.(string); ok { + return id.PluginIDFrom(tmpStr) + } + return id.PluginID{}, errors.New("invalid ID") +} + +func MarshalPluginExtensionID(t id.PluginExtensionID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalPluginExtensionID(v interface{}) (id.PluginExtensionID, error) { + if tmpStr, ok := v.(string); ok { + return id.PluginExtensionID(tmpStr), nil + } + return id.PluginExtensionID(""), errors.New("invalid ID") +} + +func MarshalPropertySchemaID(t id.PropertySchemaID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalPropertySchemaID(v interface{}) (id.PropertySchemaID, error) { + if tmpStr, ok := v.(string); ok { + return id.PropertySchemaIDFrom(tmpStr) + } + return 
id.PropertySchemaID{}, errors.New("invalid ID") +} + +func MarshalPropertySchemaGroupID(t id.PropertySchemaGroupID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalPropertySchemaGroupID(v interface{}) (id.PropertySchemaGroupID, error) { + if tmpStr, ok := v.(string); ok { + return id.PropertySchemaGroupID(tmpStr), nil + } + return id.PropertySchemaGroupID(""), errors.New("invalid ID") +} + +func IDFrom[T idx.Type](i idx.ID[T]) ID { + return ID(i.String()) +} + +func IDFromRef[T idx.Type](i *idx.ID[T]) *ID { + return (*ID)(i.StringRef()) +} + +func IDFromStringRef[T idx.Type](i *idx.StringID[T]) *ID { + return (*ID)(i) +} + +func IDFromPluginID(i id.PluginID) ID { + return ID(i.String()) +} + +func IDFromPluginIDRef(i *id.PluginID) *ID { + return (*ID)(i.StringRef()) +} + +func IDFromPropertySchemaID(i id.PropertySchemaID) ID { + return ID(i.String()) +} + +func IDFromPropertySchemaIDRef(i *id.PropertySchemaID) *ID { + return (*ID)(i.StringRef()) +} + +func ToID[A idx.Type](a ID) (idx.ID[A], error) { + return idx.From[A](string(a)) +} + +func ToID2[A, B idx.Type](a, b ID) (ai idx.ID[A], bi idx.ID[B], err error) { + ai, err = ToID[A](a) + if err != nil { + return + } + bi, err = ToID[B](b) + return +} + +func ToID3[A, B, C idx.Type](a, b, c ID) (ai idx.ID[A], bi idx.ID[B], ci idx.ID[C], err error) { + ai, bi, err = ToID2[A, B](a, b) + if err != nil { + return + } + ci, err = ToID[C](c) + return +} + +func ToIDRef[A idx.Type](a *ID) *idx.ID[A] { + return idx.FromRef[A]((*string)(a)) +} + +func ToStringIDRef[T idx.Type](a *ID) *idx.StringID[T] { + return idx.StringIDFromRef[T]((*string)(a)) +} + +func ToPropertySchemaID(a ID) (id.PropertySchemaID, error) { + return id.PropertySchemaIDFrom((string)(a)) +} + +func ToPluginID(a ID) (id.PluginID, error) { + return id.PluginIDFrom((string)(a)) +} + +func ToPluginID2(a, b ID) (ai id.PluginID, bi id.PluginID, err error) { + ai, 
err = id.PluginIDFrom((string)(a)) + if err != nil { + return + } + bi, err = ToPluginID(b) + return ai, bi, err +} + +func ToPropertySchemaIDRef(a *ID) *id.PropertySchemaID { + return id.PropertySchemaIDFromRef((*string)(a)) +} + +func ToPluginIDRef(a *ID) *id.PluginID { + return id.PluginIDFromRef((*string)(a)) +} diff --git a/server/internal/adapter/gql/loader.go b/server/internal/adapter/gql/loader.go new file mode 100644 index 000000000..78c50ef92 --- /dev/null +++ b/server/internal/adapter/gql/loader.go @@ -0,0 +1,114 @@ +package gql + +import ( + "context" + "time" + + "github.com/reearth/reearth-backend/internal/usecase/interfaces" +) + +const ( + dataLoaderWait = 1 * time.Millisecond + dataLoaderMaxBatch = 100 +) + +type Loaders struct { + usecases interfaces.Container + Asset *AssetLoader + Dataset *DatasetLoader + Layer *LayerLoader + Plugin *PluginLoader + Project *ProjectLoader + Property *PropertyLoader + Scene *SceneLoader + Team *TeamLoader + User *UserLoader + Tag *TagLoader +} + +type DataLoaders struct { + Asset AssetDataLoader + Dataset DatasetDataLoader + DatasetSchema DatasetSchemaDataLoader + LayerItem LayerItemDataLoader + LayerGroup LayerGroupDataLoader + Layer LayerDataLoader + Plugin PluginDataLoader + Project ProjectDataLoader + Property PropertyDataLoader + PropertySchema PropertySchemaDataLoader + Scene SceneDataLoader + Team TeamDataLoader + User UserDataLoader + Tag TagDataLoader + TagItem TagItemDataLoader + TagGroup TagGroupDataLoader +} + +func NewLoaders(usecases *interfaces.Container) *Loaders { + if usecases == nil { + return nil + } + return &Loaders{ + usecases: *usecases, + Asset: NewAssetLoader(usecases.Asset), + Dataset: NewDatasetLoader(usecases.Dataset), + Layer: NewLayerLoader(usecases.Layer), + Plugin: NewPluginLoader(usecases.Plugin), + Project: NewProjectLoader(usecases.Project), + Property: NewPropertyLoader(usecases.Property), + Scene: NewSceneLoader(usecases.Scene), + Team: NewTeamLoader(usecases.Team), + User: 
NewUserLoader(usecases.User), + Tag: NewTagLoader(usecases.Tag), + } +} + +func (l Loaders) DataLoadersWith(ctx context.Context, enabled bool) *DataLoaders { + if enabled { + return l.DataLoaders(ctx) + } + return l.OrdinaryDataLoaders(ctx) +} + +func (l Loaders) DataLoaders(ctx context.Context) *DataLoaders { + return &DataLoaders{ + Asset: l.Asset.DataLoader(ctx), + Dataset: l.Dataset.DataLoader(ctx), + DatasetSchema: l.Dataset.SchemaDataLoader(ctx), + LayerItem: l.Layer.ItemDataLoader(ctx), + LayerGroup: l.Layer.GroupDataLoader(ctx), + Layer: l.Layer.DataLoader(ctx), + Plugin: l.Plugin.DataLoader(ctx), + Project: l.Project.DataLoader(ctx), + Property: l.Property.DataLoader(ctx), + PropertySchema: l.Property.SchemaDataLoader(ctx), + Scene: l.Scene.DataLoader(ctx), + Team: l.Team.DataLoader(ctx), + User: l.User.DataLoader(ctx), + Tag: l.Tag.DataLoader(ctx), + TagItem: l.Tag.ItemDataLoader(ctx), + TagGroup: l.Tag.GroupDataLoader(ctx), + } +} + +func (l Loaders) OrdinaryDataLoaders(ctx context.Context) *DataLoaders { + return &DataLoaders{ + Asset: l.Asset.OrdinaryDataLoader(ctx), + Dataset: l.Dataset.OrdinaryDataLoader(ctx), + DatasetSchema: l.Dataset.SchemaOrdinaryDataLoader(ctx), + LayerItem: l.Layer.ItemOrdinaryDataLoader(ctx), + LayerGroup: l.Layer.GroupOrdinaryDataLoader(ctx), + Layer: l.Layer.OrdinaryDataLoader(ctx), + Plugin: l.Plugin.OrdinaryDataLoader(ctx), + Project: l.Project.OrdinaryDataLoader(ctx), + Property: l.Property.OrdinaryDataLoader(ctx), + PropertySchema: l.Property.SchemaOrdinaryDataLoader(ctx), + Scene: l.Scene.OrdinaryDataLoader(ctx), + Team: l.Team.OrdinaryDataLoader(ctx), + User: l.User.OrdinaryDataLoader(ctx), + Tag: l.Tag.OrdinaryDataLoader(ctx), + TagItem: l.Tag.ItemDataLoader(ctx), + TagGroup: l.Tag.GroupDataLoader(ctx), + } +} diff --git a/server/internal/adapter/gql/loader_asset.go b/server/internal/adapter/gql/loader_asset.go new file mode 100644 index 000000000..03f7fc563 --- /dev/null +++ 
b/server/internal/adapter/gql/loader_asset.go @@ -0,0 +1,106 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/asset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" +) + +type AssetLoader struct { + usecase interfaces.Asset +} + +func NewAssetLoader(usecase interfaces.Asset) *AssetLoader { + return &AssetLoader{usecase: usecase} +} + +func (c *AssetLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Asset, []error) { + ids2, err := util.TryMap(ids, gqlmodel.ToID[id.Asset]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, ids2, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + return util.Map(res, gqlmodel.ToAsset), nil +} + +func (c *AssetLoader) FindByTeam(ctx context.Context, teamID gqlmodel.ID, keyword *string, sort *asset.SortType, pagination *gqlmodel.Pagination) (*gqlmodel.AssetConnection, error) { + tid, err := gqlmodel.ToID[id.Team](teamID) + if err != nil { + return nil, err + } + + assets, pi, err := c.usecase.FindByTeam(ctx, tid, keyword, sort, gqlmodel.ToPagination(pagination), getOperator(ctx)) + if err != nil { + return nil, err + } + + edges := make([]*gqlmodel.AssetEdge, 0, len(assets)) + nodes := make([]*gqlmodel.Asset, 0, len(assets)) + for _, a := range assets { + asset := gqlmodel.ToAsset(a) + edges = append(edges, &gqlmodel.AssetEdge{ + Node: asset, + Cursor: usecase.Cursor(asset.ID), + }) + nodes = append(nodes, asset) + } + + return &gqlmodel.AssetConnection{ + Edges: edges, + Nodes: nodes, + PageInfo: gqlmodel.ToPageInfo(pi), + TotalCount: pi.TotalCount(), + }, nil +} + +// data loader + +type AssetDataLoader interface { + 
Load(gqlmodel.ID) (*gqlmodel.Asset, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Asset, []error) +} + +func (c *AssetLoader) DataLoader(ctx context.Context) AssetDataLoader { + return gqldataloader.NewAssetLoader(gqldataloader.AssetLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Asset, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *AssetLoader) OrdinaryDataLoader(ctx context.Context) AssetDataLoader { + return &ordinaryAssetLoader{ctx: ctx, c: c} +} + +type ordinaryAssetLoader struct { + ctx context.Context + c *AssetLoader +} + +func (l *ordinaryAssetLoader) Load(key gqlmodel.ID) (*gqlmodel.Asset, error) { + res, errs := l.c.Fetch(l.ctx, []gqlmodel.ID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryAssetLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Asset, []error) { + return l.c.Fetch(l.ctx, keys) +} diff --git a/server/internal/adapter/gql/loader_dataset.go b/server/internal/adapter/gql/loader_dataset.go new file mode 100644 index 000000000..d310aed8c --- /dev/null +++ b/server/internal/adapter/gql/loader_dataset.go @@ -0,0 +1,275 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" +) + +type DatasetLoader struct { + usecase interfaces.Dataset +} + +func NewDatasetLoader(usecase interfaces.Dataset) *DatasetLoader { + return &DatasetLoader{usecase: usecase} +} + +func (c *DatasetLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Dataset, []error) { + datasetids, err := util.TryMap(ids, gqlmodel.ToID[id.Dataset]) + if err != nil 
{ + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, datasetids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + datasets := make([]*gqlmodel.Dataset, 0, len(res)) + for _, d := range res { + datasets = append(datasets, gqlmodel.ToDataset(d)) + } + + return datasets, nil +} + +func (c *DatasetLoader) FetchSchema(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) { + schemaids, err := util.TryMap(ids, gqlmodel.ToID[id.DatasetSchema]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.FetchSchema(ctx, schemaids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + schemas := make([]*gqlmodel.DatasetSchema, 0, len(res)) + for _, d := range res { + schemas = append(schemas, gqlmodel.ToDatasetSchema(d)) + } + + return schemas, nil +} + +func (c *DatasetLoader) GraphFetch(ctx context.Context, i gqlmodel.ID, depth int) ([]*gqlmodel.Dataset, []error) { + did, err := gqlmodel.ToID[id.Dataset](i) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.GraphFetch(ctx, did, depth, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + datasets := make([]*gqlmodel.Dataset, 0, len(res)) + for _, d := range res { + datasets = append(datasets, gqlmodel.ToDataset(d)) + } + + return datasets, nil +} + +func (c *DatasetLoader) GraphFetchSchema(ctx context.Context, i gqlmodel.ID, depth int) ([]*gqlmodel.DatasetSchema, []error) { + did, err := gqlmodel.ToID[id.DatasetSchema](i) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.GraphFetchSchema(ctx, did, depth, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + schemas := make([]*gqlmodel.DatasetSchema, 0, len(res)) + for _, d := range res { + schemas = append(schemas, gqlmodel.ToDatasetSchema(d)) + } + + return schemas, nil +} + +func (c *DatasetLoader) FindSchemaByScene(ctx context.Context, i gqlmodel.ID, first *int, last *int, before 
*usecase.Cursor, after *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) { + sid, err := gqlmodel.ToID[id.Scene](i) + if err != nil { + return nil, err + } + + res, pi, err := c.usecase.FindSchemaByScene(ctx, sid, usecase.NewPagination(first, last, before, after), getOperator(ctx)) + if err != nil { + return nil, err + } + + edges := make([]*gqlmodel.DatasetSchemaEdge, 0, len(res)) + nodes := make([]*gqlmodel.DatasetSchema, 0, len(res)) + for _, dataset := range res { + ds := gqlmodel.ToDatasetSchema(dataset) + edges = append(edges, &gqlmodel.DatasetSchemaEdge{ + Node: ds, + Cursor: usecase.Cursor(ds.ID), + }) + nodes = append(nodes, ds) + } + + return &gqlmodel.DatasetSchemaConnection{ + Edges: edges, + Nodes: nodes, + PageInfo: gqlmodel.ToPageInfo(pi), + TotalCount: pi.TotalCount(), + }, nil +} + +func (c *DatasetLoader) FindDynamicSchemasByScene(ctx context.Context, sid gqlmodel.ID) ([]*gqlmodel.DatasetSchema, error) { + sceneid, err := gqlmodel.ToID[id.Scene](sid) + if err != nil { + return nil, err + } + + res, err := c.usecase.FindDynamicSchemaByScene(ctx, sceneid) + if err != nil { + return nil, err + } + + dss := []*gqlmodel.DatasetSchema{} + for _, dataset := range res { + dss = append(dss, gqlmodel.ToDatasetSchema(dataset)) + } + + return dss, nil +} + +func (c *DatasetLoader) FindBySchema(ctx context.Context, dsid gqlmodel.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor) (*gqlmodel.DatasetConnection, error) { + schemaid, err := gqlmodel.ToID[id.DatasetSchema](dsid) + if err != nil { + return nil, err + } + + p := usecase.NewPagination(first, last, before, after) + res, pi, err2 := c.usecase.FindBySchema(ctx, schemaid, p, getOperator(ctx)) + if err2 != nil { + return nil, err2 + } + + edges := make([]*gqlmodel.DatasetEdge, 0, len(res)) + nodes := make([]*gqlmodel.Dataset, 0, len(res)) + for _, dataset := range res { + ds := gqlmodel.ToDataset(dataset) + edges = append(edges, &gqlmodel.DatasetEdge{ + Node: ds, + Cursor: 
usecase.Cursor(ds.ID), + }) + nodes = append(nodes, ds) + } + + conn := &gqlmodel.DatasetConnection{ + Edges: edges, + Nodes: nodes, + PageInfo: gqlmodel.ToPageInfo(pi), + TotalCount: pi.TotalCount(), + } + + return conn, nil +} + +func (c *DatasetLoader) CountBySchema(ctx context.Context, dsid gqlmodel.ID) (int, error) { + id, err := gqlmodel.ToID[id.DatasetSchema](dsid) + if err != nil { + return 0, err + } + + cnt, err := c.usecase.CountBySchema(ctx, id) + if err != nil { + return 0, err + } + + return cnt, nil +} + +// data loader + +type DatasetDataLoader interface { + Load(gqlmodel.ID) (*gqlmodel.Dataset, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Dataset, []error) +} + +func (c *DatasetLoader) DataLoader(ctx context.Context) DatasetDataLoader { + return gqldataloader.NewDatasetLoader(gqldataloader.DatasetLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Dataset, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *DatasetLoader) OrdinaryDataLoader(ctx context.Context) DatasetDataLoader { + return &ordinaryDatasetLoader{ctx: ctx, c: c} +} + +type ordinaryDatasetLoader struct { + ctx context.Context + c *DatasetLoader +} + +func (l *ordinaryDatasetLoader) Load(key gqlmodel.ID) (*gqlmodel.Dataset, error) { + res, errs := l.c.Fetch(l.ctx, []gqlmodel.ID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryDatasetLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Dataset, []error) { + return l.c.Fetch(l.ctx, keys) +} + +type DatasetSchemaDataLoader interface { + Load(gqlmodel.ID) (*gqlmodel.DatasetSchema, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) +} + +func (c *DatasetLoader) SchemaDataLoader(ctx context.Context) DatasetSchemaDataLoader { + return gqldataloader.NewDatasetSchemaLoader(gqldataloader.DatasetSchemaLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: 
dataLoaderMaxBatch, + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) { + return c.FetchSchema(ctx, keys) + }, + }) +} + +func (c *DatasetLoader) SchemaOrdinaryDataLoader(ctx context.Context) DatasetSchemaDataLoader { + return &ordinaryDatasetSchemaLoader{ + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) { + return c.FetchSchema(ctx, keys) + }, + } +} + +type ordinaryDatasetSchemaLoader struct { + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) +} + +func (l *ordinaryDatasetSchemaLoader) Load(key gqlmodel.ID) (*gqlmodel.DatasetSchema, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryDatasetSchemaLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) { + return l.fetch(keys) +} diff --git a/server/internal/adapter/gql/loader_layer.go b/server/internal/adapter/gql/loader_layer.go new file mode 100644 index 000000000..b5f7293af --- /dev/null +++ b/server/internal/adapter/gql/loader_layer.go @@ -0,0 +1,288 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" +) + +type LayerLoader struct { + usecase interfaces.Layer +} + +func NewLayerLoader(usecase interfaces.Layer) *LayerLoader { + return &LayerLoader{usecase: usecase} +} + +func (c *LayerLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Layer, []error) { + layerids, err := util.TryMap(ids, gqlmodel.ToID[id.Layer]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, layerids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + layers := 
make([]*gqlmodel.Layer, 0, len(res)) + for _, l := range res { + if l == nil { + layers = append(layers, nil) + } else { + layer := gqlmodel.ToLayer(*l, nil) + layers = append(layers, &layer) + } + } + + return layers, nil +} + +func (c *LayerLoader) FetchGroup(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) { + layerids, err := util.TryMap(ids, gqlmodel.ToID[id.Layer]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.FetchGroup(ctx, layerids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + layerGroups := make([]*gqlmodel.LayerGroup, 0, len(res)) + for _, l := range res { + layerGroups = append(layerGroups, gqlmodel.ToLayerGroup(l, nil)) + } + + return layerGroups, nil +} + +func (c *LayerLoader) FetchItem(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) { + layerids, err := util.TryMap(ids, gqlmodel.ToID[id.Layer]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.FetchItem(ctx, layerids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + layerItems := make([]*gqlmodel.LayerItem, 0, len(res)) + for _, l := range res { + layerItems = append(layerItems, gqlmodel.ToLayerItem(l, nil)) + } + + return layerItems, nil +} + +func (c *LayerLoader) FetchParent(ctx context.Context, lid gqlmodel.ID) (*gqlmodel.LayerGroup, error) { + layerid, err := gqlmodel.ToID[id.Layer](lid) + if err != nil { + return nil, err + } + + res, err := c.usecase.FetchParent(ctx, layerid, getOperator(ctx)) + if err != nil { + return nil, err + } + + return gqlmodel.ToLayerGroup(res, nil), nil +} + +func (c *LayerLoader) FetchByProperty(ctx context.Context, pid gqlmodel.ID) (gqlmodel.Layer, error) { + propertyid, err := gqlmodel.ToID[id.Property](pid) + if err != nil { + return nil, err + } + + res, err := c.usecase.FetchByProperty(ctx, propertyid, getOperator(ctx)) + if err != nil { + return nil, err + } + + return gqlmodel.ToLayer(res, nil), nil +} + 
+func (c *LayerLoader) FetchMerged(ctx context.Context, org gqlmodel.ID, parent *gqlmodel.ID) (*gqlmodel.MergedLayer, error) { + orgid, err := gqlmodel.ToID[id.Layer](org) + if err != nil { + return nil, err + } + + res, err2 := c.usecase.FetchMerged(ctx, orgid, gqlmodel.ToIDRef[id.Layer](parent), getOperator(ctx)) + if err2 != nil { + return nil, err2 + } + + return gqlmodel.ToMergedLayer(res), nil +} + +func (c *LayerLoader) FetchParentAndMerged(ctx context.Context, org gqlmodel.ID) (*gqlmodel.MergedLayer, error) { + orgid, err := gqlmodel.ToID[id.Layer](org) + if err != nil { + return nil, err + } + + res, err2 := c.usecase.FetchParentAndMerged(ctx, orgid, getOperator(ctx)) + if err2 != nil { + return nil, err2 + } + + return gqlmodel.ToMergedLayer(res), nil +} + +func (c *LayerLoader) FetchByTag(ctx context.Context, tag gqlmodel.ID) ([]gqlmodel.Layer, error) { + tagid, err := gqlmodel.ToID[id.Tag](tag) + if err != nil { + return nil, err + } + + res, err2 := c.usecase.FetchByTag(ctx, tagid, getOperator(ctx)) + if err2 != nil { + return nil, err2 + } + + layers := make([]gqlmodel.Layer, 0, len(res)) + for _, l := range res { + if l == nil { + layers = append(layers, nil) + } else { + layers = append(layers, gqlmodel.ToLayer(*l, nil)) + } + } + + return layers, nil +} + +// data loader + +type LayerDataLoader interface { + Load(gqlmodel.ID) (*gqlmodel.Layer, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Layer, []error) +} + +func (c *LayerLoader) DataLoader(ctx context.Context) LayerDataLoader { + return gqldataloader.NewLayerLoader(gqldataloader.LayerLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Layer, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *LayerLoader) OrdinaryDataLoader(ctx context.Context) LayerDataLoader { + return &ordinaryLayerLoader{ + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Layer, []error) { + return c.Fetch(ctx, keys) + }, + } +} + +type 
ordinaryLayerLoader struct { + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Layer, []error) +} + +func (l *ordinaryLayerLoader) Load(key gqlmodel.ID) (*gqlmodel.Layer, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryLayerLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Layer, []error) { + return l.fetch(keys) +} + +type LayerItemDataLoader interface { + Load(gqlmodel.ID) (*gqlmodel.LayerItem, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) +} + +func (c *LayerLoader) ItemDataLoader(ctx context.Context) LayerItemDataLoader { + return gqldataloader.NewLayerItemLoader(gqldataloader.LayerItemLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) { + return c.FetchItem(ctx, keys) + }, + }) +} + +func (c *LayerLoader) ItemOrdinaryDataLoader(ctx context.Context) LayerItemDataLoader { + return &ordinaryLayerItemLoader{ + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) { + return c.FetchItem(ctx, keys) + }, + } +} + +type ordinaryLayerItemLoader struct { + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) +} + +func (l *ordinaryLayerItemLoader) Load(key gqlmodel.ID) (*gqlmodel.LayerItem, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryLayerItemLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) { + return l.fetch(keys) +} + +type LayerGroupDataLoader interface { + Load(gqlmodel.ID) (*gqlmodel.LayerGroup, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) +} + +func (c *LayerLoader) GroupDataLoader(ctx context.Context) LayerGroupDataLoader { + return gqldataloader.NewLayerGroupLoader(gqldataloader.LayerGroupLoaderConfig{ + Wait: dataLoaderWait, + 
MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) { + return c.FetchGroup(ctx, keys) + }, + }) +} + +func (c *LayerLoader) GroupOrdinaryDataLoader(ctx context.Context) LayerGroupDataLoader { + return &ordinaryLayerGroupLoader{ + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) { + return c.FetchGroup(ctx, keys) + }, + } +} + +type ordinaryLayerGroupLoader struct { + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) +} + +func (l *ordinaryLayerGroupLoader) Load(key gqlmodel.ID) (*gqlmodel.LayerGroup, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryLayerGroupLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) { + return l.fetch(keys) +} diff --git a/server/internal/adapter/gql/loader_plugin.go b/server/internal/adapter/gql/loader_plugin.go new file mode 100644 index 000000000..1ec7ac0b4 --- /dev/null +++ b/server/internal/adapter/gql/loader_plugin.go @@ -0,0 +1,81 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/util" +) + +type PluginLoader struct { + usecase interfaces.Plugin +} + +func NewPluginLoader(usecase interfaces.Plugin) *PluginLoader { + return &PluginLoader{usecase: usecase} +} + +func (c *PluginLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Plugin, []error) { + ids2, err := util.TryMap(ids, gqlmodel.ToPluginID) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, ids2, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + plugins := make([]*gqlmodel.Plugin, 0, len(res)) + for _, pl := range res { + plugins = 
append(plugins, gqlmodel.ToPlugin(pl)) + } + + return plugins, nil +} + +// data loader + +type PluginDataLoader interface { + Load(gqlmodel.ID) (*gqlmodel.Plugin, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Plugin, []error) +} + +func (c *PluginLoader) DataLoader(ctx context.Context) PluginDataLoader { + return gqldataloader.NewPluginLoader(gqldataloader.PluginLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Plugin, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *PluginLoader) OrdinaryDataLoader(ctx context.Context) PluginDataLoader { + return &ordinaryPluginLoader{ + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Plugin, []error) { + return c.Fetch(ctx, keys) + }, + } +} + +type ordinaryPluginLoader struct { + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Plugin, []error) +} + +func (l *ordinaryPluginLoader) Load(key gqlmodel.ID) (*gqlmodel.Plugin, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryPluginLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Plugin, []error) { + return l.fetch(keys) +} diff --git a/server/internal/adapter/gql/loader_project.go b/server/internal/adapter/gql/loader_project.go new file mode 100644 index 000000000..7cea429f2 --- /dev/null +++ b/server/internal/adapter/gql/loader_project.go @@ -0,0 +1,122 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" +) + +type ProjectLoader struct { + usecase interfaces.Project +} + +func NewProjectLoader(usecase interfaces.Project) 
*ProjectLoader { + return &ProjectLoader{usecase: usecase} +} + +func (c *ProjectLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Project, []error) { + ids2, err := util.TryMap(ids, gqlmodel.ToID[id.Project]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, ids2, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + projects := make([]*gqlmodel.Project, 0, len(res)) + for _, project := range res { + projects = append(projects, gqlmodel.ToProject(project)) + } + + return projects, nil +} + +func (c *ProjectLoader) FindByTeam(ctx context.Context, teamID gqlmodel.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor) (*gqlmodel.ProjectConnection, error) { + tid, err := gqlmodel.ToID[id.Team](teamID) + if err != nil { + return nil, err + } + + res, pi, err := c.usecase.FindByTeam(ctx, tid, usecase.NewPagination(first, last, before, after), getOperator(ctx)) + if err != nil { + return nil, err + } + + edges := make([]*gqlmodel.ProjectEdge, 0, len(res)) + nodes := make([]*gqlmodel.Project, 0, len(res)) + for _, p := range res { + prj := gqlmodel.ToProject(p) + edges = append(edges, &gqlmodel.ProjectEdge{ + Node: prj, + Cursor: usecase.Cursor(prj.ID), + }) + nodes = append(nodes, prj) + } + + return &gqlmodel.ProjectConnection{ + Edges: edges, + Nodes: nodes, + PageInfo: gqlmodel.ToPageInfo(pi), + TotalCount: pi.TotalCount(), + }, nil +} + +func (c *ProjectLoader) CheckAlias(ctx context.Context, alias string) (*gqlmodel.ProjectAliasAvailability, error) { + ok, err := c.usecase.CheckAlias(ctx, alias) + if err != nil { + return nil, err + } + + return &gqlmodel.ProjectAliasAvailability{Alias: alias, Available: ok}, nil +} + +// data loaders + +type ProjectDataLoader interface { + Load(gqlmodel.ID) (*gqlmodel.Project, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Project, []error) +} + +func (c *ProjectLoader) DataLoader(ctx context.Context) ProjectDataLoader { + return 
gqldataloader.NewProjectLoader(gqldataloader.ProjectLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Project, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *ProjectLoader) OrdinaryDataLoader(ctx context.Context) ProjectDataLoader { + return &ordinaryProjectLoader{ + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Project, []error) { + return c.Fetch(ctx, keys) + }, + } +} + +type ordinaryProjectLoader struct { + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Project, []error) +} + +func (l *ordinaryProjectLoader) Load(key gqlmodel.ID) (*gqlmodel.Project, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryProjectLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Project, []error) { + return l.fetch(keys) +} diff --git a/server/internal/adapter/gql/loader_property.go b/server/internal/adapter/gql/loader_property.go new file mode 100644 index 000000000..0bba8f3f6 --- /dev/null +++ b/server/internal/adapter/gql/loader_property.go @@ -0,0 +1,153 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" +) + +type PropertyLoader struct { + usecase interfaces.Property +} + +func NewPropertyLoader(usecase interfaces.Property) *PropertyLoader { + return &PropertyLoader{usecase: usecase} +} + +func (c *PropertyLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Property, []error) { + ids2, err := util.TryMap(ids, gqlmodel.ToID[id.Property]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, ids2, getOperator(ctx)) + if err != nil { + 
return nil, []error{err} + } + + properties := make([]*gqlmodel.Property, 0, len(res)) + for _, property := range res { + properties = append(properties, gqlmodel.ToProperty(property)) + } + + return properties, nil +} + +func (c *PropertyLoader) FetchSchema(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) { + ids2, err := util.TryMap(ids, gqlmodel.ToPropertySchemaID) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.FetchSchema(ctx, ids2, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + return util.Map(res, gqlmodel.ToPropertySchema), nil +} + +func (c *PropertyLoader) FetchMerged(ctx context.Context, org, parent, linked *gqlmodel.ID) (*gqlmodel.MergedProperty, error) { + res, err := c.usecase.FetchMerged( + ctx, + gqlmodel.ToIDRef[id.Property](org), + gqlmodel.ToIDRef[id.Property](parent), + gqlmodel.ToIDRef[id.Dataset](linked), + getOperator(ctx), + ) + if err != nil { + return nil, err + } + + return gqlmodel.ToMergedProperty(res), nil +} + +// data loader + +type PropertyDataLoader interface { + Load(gqlmodel.ID) (*gqlmodel.Property, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Property, []error) +} + +func (c *PropertyLoader) DataLoader(ctx context.Context) PropertyDataLoader { + return gqldataloader.NewPropertyLoader(gqldataloader.PropertyLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Property, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *PropertyLoader) OrdinaryDataLoader(ctx context.Context) PropertyDataLoader { + return &ordinaryPropertyLoader{ + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Property, []error) { + return c.Fetch(ctx, keys) + }, + } +} + +type ordinaryPropertyLoader struct { + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Property, []error) +} + +func (l *ordinaryPropertyLoader) Load(key gqlmodel.ID) (*gqlmodel.Property, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) + if 
len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryPropertyLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Property, []error) { + return l.fetch(keys) +} + +type PropertySchemaDataLoader interface { + Load(gqlmodel.ID) (*gqlmodel.PropertySchema, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) +} + +func (c *PropertyLoader) SchemaDataLoader(ctx context.Context) PropertySchemaDataLoader { + return gqldataloader.NewPropertySchemaLoader(gqldataloader.PropertySchemaLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) { + return c.FetchSchema(ctx, keys) + }, + }) +} + +func (c *PropertyLoader) SchemaOrdinaryDataLoader(ctx context.Context) PropertySchemaDataLoader { + return &ordinaryPropertySchemaLoader{ + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) { + return c.FetchSchema(ctx, keys) + }, + } +} + +type ordinaryPropertySchemaLoader struct { + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) +} + +func (l *ordinaryPropertySchemaLoader) Load(key gqlmodel.ID) (*gqlmodel.PropertySchema, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryPropertySchemaLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) { + return l.fetch(keys) +} diff --git a/server/internal/adapter/gql/loader_scene.go b/server/internal/adapter/gql/loader_scene.go new file mode 100644 index 000000000..89a09cb4c --- /dev/null +++ b/server/internal/adapter/gql/loader_scene.go @@ -0,0 +1,95 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + 
"github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" +) + +type SceneLoader struct { + usecase interfaces.Scene +} + +func NewSceneLoader(usecase interfaces.Scene) *SceneLoader { + return &SceneLoader{usecase: usecase} +} + +func (c *SceneLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Scene, []error) { + pids, err := util.TryMap(ids, gqlmodel.ToID[id.Scene]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, pids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + scenes := make([]*gqlmodel.Scene, 0, len(res)) + for _, scene := range res { + scenes = append(scenes, gqlmodel.ToScene(scene)) + } + return scenes, nil +} + +func (c *SceneLoader) FindByProject(ctx context.Context, projectID gqlmodel.ID) (*gqlmodel.Scene, error) { + pid, err := gqlmodel.ToID[id.Project](projectID) + if err != nil { + return nil, err + } + + res, err := c.usecase.FindByProject(ctx, pid, getOperator(ctx)) + if err != nil { + return nil, err + } + + return gqlmodel.ToScene(res), nil +} + +// data loader + +type SceneDataLoader interface { + Load(gqlmodel.ID) (*gqlmodel.Scene, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Scene, []error) +} + +func (c *SceneLoader) DataLoader(ctx context.Context) SceneDataLoader { + return gqldataloader.NewSceneLoader(gqldataloader.SceneLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Scene, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *SceneLoader) OrdinaryDataLoader(ctx context.Context) SceneDataLoader { + return &ordinarySceneLoader{ + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Scene, []error) { + return c.Fetch(ctx, keys) + }, + } +} + +type ordinarySceneLoader struct { + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Scene, []error) +} + +func (l *ordinarySceneLoader) Load(key gqlmodel.ID) 
(*gqlmodel.Scene, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinarySceneLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Scene, []error) { + return l.fetch(keys) +} diff --git a/server/internal/adapter/gql/loader_tag.go b/server/internal/adapter/gql/loader_tag.go new file mode 100644 index 000000000..c88542b95 --- /dev/null +++ b/server/internal/adapter/gql/loader_tag.go @@ -0,0 +1,213 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" +) + +type TagLoader struct { + usecase interfaces.Tag +} + +func NewTagLoader(usecase interfaces.Tag) *TagLoader { + return &TagLoader{usecase: usecase} +} + +func (c *TagLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Tag, []error) { + tagids, err := util.TryMap(ids, gqlmodel.ToID[id.Tag]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, tagids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + tags := make([]*gqlmodel.Tag, 0, len(res)) + for _, t := range res { + if t != nil { + tag := gqlmodel.ToTag(*t) + tags = append(tags, &tag) + } + } + + return tags, nil +} + +func (c *TagLoader) FetchGroup(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.TagGroup, []error) { + tids, err := util.TryMap(ids, gqlmodel.ToID[id.Tag]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.FetchGroup(ctx, tids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + tagGroups := make([]*gqlmodel.TagGroup, 0, len(res)) + for _, t := range res { + tg := gqlmodel.ToTagGroup(t) + if tg != nil { + tagGroups = 
append(tagGroups, tg) + } + } + + return tagGroups, nil +} + +func (c *TagLoader) FetchItem(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.TagItem, []error) { + tids, err := util.TryMap(ids, gqlmodel.ToID[id.Tag]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.FetchItem(ctx, tids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + tagItems := make([]*gqlmodel.TagItem, 0, len(res)) + for _, t := range res { + ti := gqlmodel.ToTagItem(t) + if ti != nil { + tagItems = append(tagItems, ti) + } + } + + return tagItems, nil +} + +// data loaders + +type TagDataLoader interface { + Load(gqlmodel.ID) (*gqlmodel.Tag, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Tag, []error) +} + +func (c *TagLoader) DataLoader(ctx context.Context) TagDataLoader { + return gqldataloader.NewTagLoader(gqldataloader.TagLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Tag, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *TagLoader) OrdinaryDataLoader(ctx context.Context) TagDataLoader { + return &ordinaryTagLoader{ + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Tag, []error) { + return c.Fetch(ctx, keys) + }, + } +} + +type ordinaryTagLoader struct { + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Tag, []error) +} + +func (t *ordinaryTagLoader) Load(key gqlmodel.ID) (*gqlmodel.Tag, error) { + res, errs := t.fetch([]gqlmodel.ID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (t *ordinaryTagLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Tag, []error) { + return t.fetch(keys) +} + +type TagItemDataLoader interface { + Load(gqlmodel.ID) (*gqlmodel.TagItem, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.TagItem, []error) +} + +func (c *TagLoader) ItemDataLoader(ctx context.Context) TagItemDataLoader { + return gqldataloader.NewTagItemLoader(gqldataloader.TagItemLoaderConfig{ + Wait: 
dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.TagItem, []error) { + return c.FetchItem(ctx, keys) + }, + }) +} + +func (c *TagLoader) ItemOrdinaryDataLoader(ctx context.Context) TagItemDataLoader { + return &ordinaryTagItemLoader{ + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.TagItem, []error) { + return c.FetchItem(ctx, keys) + }, + } +} + +type ordinaryTagItemLoader struct { + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.TagItem, []error) +} + +func (t *ordinaryTagItemLoader) Load(key gqlmodel.ID) (*gqlmodel.TagItem, error) { + res, errs := t.fetch([]gqlmodel.ID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (t *ordinaryTagItemLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.TagItem, []error) { + return t.fetch(keys) +} + +type TagGroupDataLoader interface { + Load(gqlmodel.ID) (*gqlmodel.TagGroup, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.TagGroup, []error) +} + +func (c *TagLoader) GroupDataLoader(ctx context.Context) TagGroupDataLoader { + return gqldataloader.NewTagGroupLoader(gqldataloader.TagGroupLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.TagGroup, []error) { + return c.FetchGroup(ctx, keys) + }, + }) +} + +func (c *TagLoader) GroupOrdinaryDataLoader(ctx context.Context) TagGroupDataLoader { + return &ordinaryTagGroupLoader{ + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.TagGroup, []error) { + return c.FetchGroup(ctx, keys) + }, + } +} + +type ordinaryTagGroupLoader struct { + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.TagGroup, []error) +} + +func (t *ordinaryTagGroupLoader) Load(key gqlmodel.ID) (*gqlmodel.TagGroup, error) { + res, errs := t.fetch([]gqlmodel.ID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (t *ordinaryTagGroupLoader) LoadAll(keys []gqlmodel.ID) 
([]*gqlmodel.TagGroup, []error) { + return t.fetch(keys) +} diff --git a/server/internal/adapter/gql/loader_team.go b/server/internal/adapter/gql/loader_team.go new file mode 100644 index 000000000..bfaeee35b --- /dev/null +++ b/server/internal/adapter/gql/loader_team.go @@ -0,0 +1,98 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" +) + +type TeamLoader struct { + usecase interfaces.Team +} + +func NewTeamLoader(usecase interfaces.Team) *TeamLoader { + return &TeamLoader{usecase: usecase} +} + +func (c *TeamLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Team, []error) { + uids, err := util.TryMap(ids, gqlmodel.ToID[id.Team]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, uids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + teams := make([]*gqlmodel.Team, 0, len(res)) + for _, t := range res { + teams = append(teams, gqlmodel.ToTeam(t)) + } + return teams, nil +} + +func (c *TeamLoader) FindByUser(ctx context.Context, uid gqlmodel.ID) ([]*gqlmodel.Team, error) { + userid, err := gqlmodel.ToID[id.User](uid) + if err != nil { + return nil, err + } + + res, err := c.usecase.FindByUser(ctx, userid, getOperator(ctx)) + if err != nil { + return nil, err + } + teams := make([]*gqlmodel.Team, 0, len(res)) + for _, t := range res { + teams = append(teams, gqlmodel.ToTeam(t)) + } + return teams, nil +} + +// data loader + +type TeamDataLoader interface { + Load(gqlmodel.ID) (*gqlmodel.Team, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Team, []error) +} + +func (c *TeamLoader) DataLoader(ctx context.Context) TeamDataLoader { + return gqldataloader.NewTeamLoader(gqldataloader.TeamLoaderConfig{ + Wait: 
dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Team, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *TeamLoader) OrdinaryDataLoader(ctx context.Context) TeamDataLoader { + return &ordinaryTeamLoader{ + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Team, []error) { + return c.Fetch(ctx, keys) + }, + } +} + +type ordinaryTeamLoader struct { + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Team, []error) +} + +func (l *ordinaryTeamLoader) Load(key gqlmodel.ID) (*gqlmodel.Team, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryTeamLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Team, []error) { + return l.fetch(keys) +} diff --git a/server/internal/adapter/gql/loader_user.go b/server/internal/adapter/gql/loader_user.go new file mode 100644 index 000000000..dcd0462c8 --- /dev/null +++ b/server/internal/adapter/gql/loader_user.go @@ -0,0 +1,91 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" +) + +type UserLoader struct { + usecase interfaces.User +} + +func NewUserLoader(usecase interfaces.User) *UserLoader { + return &UserLoader{usecase: usecase} +} + +func (c *UserLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.User, []error) { + uids, err := util.TryMap(ids, gqlmodel.ToID[id.User]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, uids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + users := make([]*gqlmodel.User, 0, len(res)) + for _, u := range res { + users = append(users, gqlmodel.ToUser(u)) + } + + 
return users, nil +} + +func (c *UserLoader) SearchUser(ctx context.Context, nameOrEmail string) (*gqlmodel.User, error) { + res, err := c.usecase.SearchUser(ctx, nameOrEmail, getOperator(ctx)) + if err != nil { + return nil, err + } + + return gqlmodel.ToUser(res), nil +} + +// data loader + +type UserDataLoader interface { + Load(gqlmodel.ID) (*gqlmodel.User, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.User, []error) +} + +func (c *UserLoader) DataLoader(ctx context.Context) UserDataLoader { + return gqldataloader.NewUserLoader(gqldataloader.UserLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.User, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *UserLoader) OrdinaryDataLoader(ctx context.Context) UserDataLoader { + return &ordinaryUserLoader{ + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.User, []error) { + return c.Fetch(ctx, keys) + }, + } +} + +type ordinaryUserLoader struct { + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.User, []error) +} + +func (l *ordinaryUserLoader) Load(key gqlmodel.ID) (*gqlmodel.User, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryUserLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.User, []error) { + return l.fetch(keys) +} diff --git a/server/internal/adapter/gql/resolver.go b/server/internal/adapter/gql/resolver.go new file mode 100644 index 000000000..40f45c3a2 --- /dev/null +++ b/server/internal/adapter/gql/resolver.go @@ -0,0 +1,19 @@ +//go:generate go run github.com/99designs/gqlgen + +package gql + +import ( + "errors" +) + +// THIS CODE IS A STARTING POINT ONLY. IT WILL NOT BE UPDATED WITH SCHEMA CHANGES. 
+ +var ErrNotImplemented = errors.New("not implemented yet") +var ErrUnauthorized = errors.New("unauthorized") + +type Resolver struct { +} + +func NewResolver() ResolverRoot { + return &Resolver{} +} diff --git a/server/internal/adapter/gql/resolver_asset.go b/server/internal/adapter/gql/resolver_asset.go new file mode 100644 index 000000000..f9d85fabf --- /dev/null +++ b/server/internal/adapter/gql/resolver_asset.go @@ -0,0 +1,17 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +func (r *Resolver) Asset() AssetResolver { + return &assetResolver{r} +} + +type assetResolver struct{ *Resolver } + +func (r *assetResolver) Team(ctx context.Context, obj *gqlmodel.Asset) (*gqlmodel.Team, error) { + return dataloaders(ctx).Team.Load(obj.TeamID) +} diff --git a/server/internal/adapter/gql/resolver_dataset.go b/server/internal/adapter/gql/resolver_dataset.go new file mode 100644 index 000000000..4edd29807 --- /dev/null +++ b/server/internal/adapter/gql/resolver_dataset.go @@ -0,0 +1,59 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +func (r *Resolver) Dataset() DatasetResolver { + return &datasetResolver{r} +} + +func (r *Resolver) DatasetField() DatasetFieldResolver { + return &datasetFieldResolver{r} +} + +type datasetResolver struct{ *Resolver } + +func (r *datasetResolver) Schema(ctx context.Context, obj *gqlmodel.Dataset) (*gqlmodel.DatasetSchema, error) { + return dataloaders(ctx).DatasetSchema.Load(obj.SchemaID) +} + +func (r *datasetResolver) Name(ctx context.Context, obj *gqlmodel.Dataset) (*string, error) { + ds, err := dataloaders(ctx).DatasetSchema.Load(obj.SchemaID) + if err != nil || ds == nil || ds.RepresentativeFieldID == nil { + return nil, err + } + f := obj.Field(*ds.RepresentativeFieldID) + if f == nil { + return nil, nil + } + if v, ok := f.Value.(string); ok { + v2 := &v + return v2, nil + } + return nil, nil +}
+type datasetFieldResolver struct{ *Resolver } + +func (r *datasetFieldResolver) Field(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.DatasetSchemaField, error) { + ds, err := dataloaders(ctx).DatasetSchema.Load(obj.SchemaID) + return ds.Field(obj.FieldID), err +} + +func (r *datasetFieldResolver) Schema(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.DatasetSchema, error) { + return dataloaders(ctx).DatasetSchema.Load(obj.SchemaID) +} + +func (r *datasetFieldResolver) ValueRef(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.Dataset, error) { + if obj.Value == nil || obj.Type != gqlmodel.ValueTypeRef { + return nil, nil + } + idstr, ok := (obj.Value).(string) + if !ok { + return nil, nil + } + return dataloaders(ctx).Dataset.Load(gqlmodel.ID(idstr)) +} diff --git a/server/internal/adapter/gql/resolver_dataset_schema.go b/server/internal/adapter/gql/resolver_dataset_schema.go new file mode 100644 index 000000000..dd6729a1c --- /dev/null +++ b/server/internal/adapter/gql/resolver_dataset_schema.go @@ -0,0 +1,56 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase" +) + +func (r *Resolver) DatasetSchema() DatasetSchemaResolver { + return &datasetSchemaResolver{r} +} + +func (r *Resolver) DatasetSchemaField() DatasetSchemaFieldResolver { + return &datasetSchemaFieldResolver{r} +} + +type datasetSchemaResolver struct{ *Resolver } + +func (r *datasetSchemaResolver) Scene(ctx context.Context, obj *gqlmodel.DatasetSchema) (*gqlmodel.Scene, error) { + return dataloaders(ctx).Scene.Load(obj.SceneID) +} + +func (r *datasetSchemaResolver) RepresentativeField(ctx context.Context, obj *gqlmodel.DatasetSchema) (*gqlmodel.DatasetSchemaField, error) { + if obj.RepresentativeFieldID == nil { + return nil, nil + } + nf := *obj.RepresentativeFieldID + for _, f := range obj.Fields { + if f.ID == nf { + return f, nil + } + } + return nil, 
nil +} + +func (r *datasetSchemaResolver) Datasets(ctx context.Context, obj *gqlmodel.DatasetSchema, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetConnection, error) { + return loaders(ctx).Dataset.FindBySchema(ctx, obj.ID, first, last, before, after) +} + +func (r *datasetSchemaResolver) TotalCount(ctx context.Context, obj *gqlmodel.DatasetSchema) (int, error) { + return loaders(ctx).Dataset.CountBySchema(ctx, obj.ID) +} + +type datasetSchemaFieldResolver struct{ *Resolver } + +func (r *datasetSchemaFieldResolver) Schema(ctx context.Context, obj *gqlmodel.DatasetSchemaField) (*gqlmodel.DatasetSchema, error) { + return dataloaders(ctx).DatasetSchema.Load(obj.SchemaID) +} + +func (r *datasetSchemaFieldResolver) Ref(ctx context.Context, obj *gqlmodel.DatasetSchemaField) (*gqlmodel.DatasetSchema, error) { + if obj.RefID == nil { + return nil, nil + } + return dataloaders(ctx).DatasetSchema.Load(*obj.RefID) +} diff --git a/server/internal/adapter/gql/resolver_layer.go b/server/internal/adapter/gql/resolver_layer.go new file mode 100644 index 000000000..b1e35179a --- /dev/null +++ b/server/internal/adapter/gql/resolver_layer.go @@ -0,0 +1,367 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +func (r *Resolver) LayerItem() LayerItemResolver { + return &layerItemResolver{r} +} + +func (r *Resolver) LayerGroup() LayerGroupResolver { + return &layerGroupResolver{r} +} + +func (r *Resolver) Infobox() InfoboxResolver { + return &infoboxResolver{r} +} + +func (r *Resolver) InfoboxField() InfoboxFieldResolver { + return &infoboxFieldResolver{r} +} + +func (r *Resolver) MergedLayer() MergedLayerResolver { + return &mergedLayerResolver{r} +} + +func (r *Resolver) MergedInfobox() MergedInfoboxResolver { + return &mergedInfoboxResolver{r} +} + +func (r *Resolver) MergedInfoboxField() MergedInfoboxFieldResolver { + return &mergedInfoboxFieldResolver{r} +} + +func (r 
*Resolver) LayerTagItem() LayerTagItemResolver { + return &layerTagItemResolver{r} +} + +func (r *Resolver) LayerTagGroup() LayerTagGroupResolver { + return &layerTagGroupResolver{r} +} + +type infoboxResolver struct{ *Resolver } + +func (r *infoboxResolver) Property(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Property, error) { + return dataloaders(ctx).Property.Load(obj.PropertyID) +} + +func (r *infoboxResolver) Layer(ctx context.Context, obj *gqlmodel.Infobox) (gqlmodel.Layer, error) { + layer, err := dataloaders(ctx).Layer.Load(obj.LayerID) + if err != nil || layer == nil { + return nil, err + } + return *layer, nil +} + +func (r *infoboxResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Dataset, error) { + if obj.LinkedDatasetID == nil { + return nil, nil + } + return dataloaders(ctx).Dataset.Load(*obj.LinkedDatasetID) +} + +func (r *infoboxResolver) Merged(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.MergedInfobox, error) { + ml, err := loaders(ctx).Layer.FetchParentAndMerged(ctx, obj.LayerID) + if err != nil || ml == nil { + return nil, err + } + return ml.Infobox, nil +} + +func (r *infoboxResolver) Scene(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Scene, error) { + return dataloaders(ctx).Scene.Load(obj.SceneID) +} + +func (r *infoboxResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.ScenePlugin, error) { + layer, err := dataloaders(ctx).Layer.Load(obj.LayerID) + if err != nil || layer == nil { + return nil, err + } + + var pluginID *gqlmodel.ID + if lg, ok := (*layer).(*gqlmodel.LayerGroup); ok { + pluginID = lg.PluginID + } else if li, ok := (*layer).(*gqlmodel.LayerItem); ok { + pluginID = li.PluginID + } + if pluginID == nil { + return nil, nil + } + + s, err := dataloaders(ctx).Scene.Load(obj.SceneID) + if err != nil { + return nil, err + } + return s.Plugin(*pluginID), nil +} + +type infoboxFieldResolver struct{ *Resolver } + +func (r *infoboxFieldResolver) 
Layer(ctx context.Context, obj *gqlmodel.InfoboxField) (gqlmodel.Layer, error) { + layer, err := dataloaders(ctx).Layer.Load(obj.LayerID) + if err != nil { + return nil, err + } + return *layer, nil +} + +func (r *infoboxFieldResolver) Infobox(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Infobox, error) { + layer, err := dataloaders(ctx).Layer.Load(obj.LayerID) + if err != nil || layer == nil { + return nil, err + } + layer2 := (*layer).(*gqlmodel.LayerItem) + if layer2 == nil { + return nil, nil + } + return layer2.Infobox, nil +} + +func (r *infoboxFieldResolver) Property(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Property, error) { + return dataloaders(ctx).Property.Load(obj.PropertyID) +} + +func (r *infoboxFieldResolver) Plugin(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Plugin, error) { + return dataloaders(ctx).Plugin.Load(obj.PluginID) +} + +func (r *infoboxFieldResolver) Extension(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.PluginExtension, error) { + plugin, err := dataloaders(ctx).Plugin.Load(obj.PluginID) + if err != nil { + return nil, err + } + return plugin.Extension(obj.ExtensionID), nil +} + +func (r *infoboxFieldResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Dataset, error) { + if obj.LinkedDatasetID == nil { + return nil, nil + } + return dataloaders(ctx).Dataset.Load(*obj.LinkedDatasetID) +} + +func (r *infoboxFieldResolver) Merged(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.MergedInfoboxField, error) { + ml, err := loaders(ctx).Layer.FetchParentAndMerged(ctx, obj.LayerID) + if err != nil || ml == nil || ml.Infobox == nil { + return nil, err + } + return ml.Infobox.Field(obj.ID), nil +} + +func (r *infoboxFieldResolver) Scene(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Scene, error) { + return dataloaders(ctx).Scene.Load(obj.SceneID) +} + +func (r *infoboxFieldResolver) ScenePlugin(ctx context.Context, obj 
*gqlmodel.InfoboxField) (*gqlmodel.ScenePlugin, error) { + s, err := dataloaders(ctx).Scene.Load(obj.SceneID) + if err != nil { + return nil, err + } + return s.Plugin(obj.PluginID), nil +} + +type layerGroupResolver struct{ *Resolver } + +func (r *layerGroupResolver) Parent(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.LayerGroup, error) { + if obj.ParentID != nil { + return dataloaders(ctx).LayerGroup.Load(*obj.ParentID) + } + return loaders(ctx).Layer.FetchParent(ctx, obj.ID) +} + +func (r *layerGroupResolver) Property(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Property, error) { + if obj.PropertyID == nil { + return nil, nil + } + return dataloaders(ctx).Property.Load(*obj.PropertyID) +} + +func (r *layerGroupResolver) Plugin(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Plugin, error) { + if obj.PluginID == nil { + return nil, nil + } + return dataloaders(ctx).Plugin.Load(*obj.PluginID) +} + +func (r *layerGroupResolver) Extension(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.PluginExtension, error) { + if obj.PluginID == nil || obj.ExtensionID == nil { + return nil, nil + } + plugin, err := dataloaders(ctx).Plugin.Load(*obj.PluginID) + if err != nil { + return nil, err + } + return plugin.Extension(*obj.ExtensionID), nil +} + +func (r *layerGroupResolver) ParentLayer(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.LayerGroup, error) { + return loaders(ctx).Layer.FetchParent(ctx, obj.ID) +} + +func (r *layerGroupResolver) LinkedDatasetSchema(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.DatasetSchema, error) { + if obj.LinkedDatasetSchemaID == nil { + return nil, nil + } + return dataloaders(ctx).DatasetSchema.Load(*obj.LinkedDatasetSchemaID) +} + +func (r *layerGroupResolver) Layers(ctx context.Context, obj *gqlmodel.LayerGroup) ([]gqlmodel.Layer, error) { + layers, err := dataloaders(ctx).Layer.LoadAll(obj.LayerIds) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + return 
gqlmodel.AttachParentLayer(layers, obj.ID), nil +} + +func (r *layerGroupResolver) Scene(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Scene, error) { + return dataloaders(ctx).Scene.Load(obj.SceneID) +} + +func (r *layerGroupResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.ScenePlugin, error) { + if obj.PluginID == nil { + return nil, nil + } + s, err := dataloaders(ctx).Scene.Load(obj.SceneID) + if err != nil { + return nil, err + } + return s.Plugin(*obj.PluginID), nil +} + +type layerItemResolver struct{ *Resolver } + +func (r *layerItemResolver) Parent(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.LayerGroup, error) { + if obj.ParentID != nil { + return dataloaders(ctx).LayerGroup.Load(*obj.ParentID) + } + return loaders(ctx).Layer.FetchParent(ctx, obj.ID) +} + +func (r *layerItemResolver) Property(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Property, error) { + if obj.PropertyID == nil { + return nil, nil + } + return dataloaders(ctx).Property.Load(*obj.PropertyID) +} + +func (r *layerItemResolver) Plugin(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Plugin, error) { + if obj.PluginID == nil { + return nil, nil + } + return dataloaders(ctx).Plugin.Load(*obj.PluginID) +} + +func (r *layerItemResolver) Extension(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.PluginExtension, error) { + if obj.PluginID == nil || obj.ExtensionID == nil { + return nil, nil + } + plugin, err := dataloaders(ctx).Plugin.Load(*obj.PluginID) + if err != nil { + return nil, err + } + return plugin.Extension(*obj.ExtensionID), nil +} + +func (r *layerItemResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Dataset, error) { + if obj.LinkedDatasetID == nil { + return nil, nil + } + return dataloaders(ctx).Dataset.Load(*obj.LinkedDatasetID) +} + +func (r *layerItemResolver) Merged(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.MergedLayer, error) { + if obj.ParentID 
== nil { + return loaders(ctx).Layer.FetchParentAndMerged(ctx, obj.ID) + } + return loaders(ctx).Layer.FetchMerged(ctx, obj.ID, obj.ParentID) +} + +func (r *layerItemResolver) Scene(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Scene, error) { + return dataloaders(ctx).Scene.Load(obj.SceneID) +} + +func (r *layerItemResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.ScenePlugin, error) { + if obj.PluginID == nil { + return nil, nil + } + s, err := dataloaders(ctx).Scene.Load(obj.SceneID) + if err != nil { + return nil, err + } + return s.Plugin(*obj.PluginID), nil +} + +type mergedLayerResolver struct{ *Resolver } + +func (r *mergedLayerResolver) Original(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.LayerItem, error) { + return dataloaders(ctx).LayerItem.Load(obj.OriginalID) +} + +func (r *mergedLayerResolver) Parent(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.LayerGroup, error) { + if obj.ParentID == nil { + return nil, nil + } + return dataloaders(ctx).LayerGroup.Load(*obj.ParentID) +} + +func (r *mergedLayerResolver) Scene(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.Scene, error) { + if obj.ParentID == nil { + return nil, nil + } + return dataloaders(ctx).Scene.Load(obj.SceneID) +} + +type mergedInfoboxResolver struct{ *Resolver } + +func (r *mergedInfoboxResolver) Scene(ctx context.Context, obj *gqlmodel.MergedInfobox) (*gqlmodel.Scene, error) { + return dataloaders(ctx).Scene.Load(obj.SceneID) +} + +type mergedInfoboxFieldResolver struct{ *Resolver } + +func (r *mergedInfoboxFieldResolver) Plugin(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.Plugin, error) { + return dataloaders(ctx).Plugin.Load(obj.PluginID) +} + +func (r *mergedInfoboxFieldResolver) Extension(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.PluginExtension, error) { + plugin, err := dataloaders(ctx).Plugin.Load(obj.PluginID) + if err != nil { + return nil, err + } + return 
plugin.Extension(obj.ExtensionID), nil +} + +func (r *mergedInfoboxFieldResolver) Scene(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.Scene, error) { + return dataloaders(ctx).Scene.Load(obj.SceneID) +} + +func (r *mergedInfoboxFieldResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.ScenePlugin, error) { + s, err := dataloaders(ctx).Scene.Load(obj.SceneID) + if err != nil { + return nil, err + } + return s.Plugin(obj.PluginID), nil +} + +type layerTagItemResolver struct{ *Resolver } + +func (r *layerTagItemResolver) Tag(ctx context.Context, obj *gqlmodel.LayerTagItem) (gqlmodel.Tag, error) { + t, err := dataloaders(ctx).Tag.Load(obj.TagID) + if err != nil { + return nil, err + } + return *t, nil +} + +type layerTagGroupResolver struct{ *Resolver } + +func (r *layerTagGroupResolver) Tag(ctx context.Context, obj *gqlmodel.LayerTagGroup) (gqlmodel.Tag, error) { + t, err := dataloaders(ctx).Tag.Load(obj.TagID) + if err != nil { + return nil, err + } + return *t, nil +} diff --git a/server/internal/adapter/gql/resolver_mutation.go b/server/internal/adapter/gql/resolver_mutation.go new file mode 100644 index 000000000..eb6a29a89 --- /dev/null +++ b/server/internal/adapter/gql/resolver_mutation.go @@ -0,0 +1,7 @@ +package gql + +func (r *Resolver) Mutation() MutationResolver { + return &mutationResolver{r} +} + +type mutationResolver struct{ *Resolver } diff --git a/server/internal/adapter/gql/resolver_mutation_asset.go b/server/internal/adapter/gql/resolver_mutation_asset.go new file mode 100644 index 000000000..ef0188838 --- /dev/null +++ b/server/internal/adapter/gql/resolver_mutation_asset.go @@ -0,0 +1,40 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *mutationResolver) CreateAsset(ctx context.Context, input 
gqlmodel.CreateAssetInput) (*gqlmodel.CreateAssetPayload, error) { + tid, err := gqlmodel.ToID[id.Team](input.TeamID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Asset.Create(ctx, interfaces.CreateAssetParam{ + TeamID: tid, + File: gqlmodel.FromFile(&input.File), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.CreateAssetPayload{Asset: gqlmodel.ToAsset(res)}, nil +} + +func (r *mutationResolver) RemoveAsset(ctx context.Context, input gqlmodel.RemoveAssetInput) (*gqlmodel.RemoveAssetPayload, error) { + aid, err := gqlmodel.ToID[id.Asset](input.AssetID) + if err != nil { + return nil, err + } + + res, err2 := usecases(ctx).Asset.Remove(ctx, aid, getOperator(ctx)) + if err2 != nil { + return nil, err2 + } + + return &gqlmodel.RemoveAssetPayload{AssetID: gqlmodel.IDFrom(res)}, nil +} diff --git a/server/internal/adapter/gql/resolver_mutation_dataset.go b/server/internal/adapter/gql/resolver_mutation_dataset.go new file mode 100644 index 000000000..87d80e6a0 --- /dev/null +++ b/server/internal/adapter/gql/resolver_mutation_dataset.go @@ -0,0 +1,156 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" +) + +func (r *mutationResolver) UpdateDatasetSchema(ctx context.Context, input gqlmodel.UpdateDatasetSchemaInput) (*gqlmodel.UpdateDatasetSchemaPayload, error) { + dsid, err := gqlmodel.ToID[id.DatasetSchema](input.SchemaID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Dataset.UpdateDatasetSchema(ctx, interfaces.UpdateDatasetSchemaParam{ + SchemaId: dsid, + Name: input.Name, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UpdateDatasetSchemaPayload{DatasetSchema: gqlmodel.ToDatasetSchema(res)}, nil +} + +func (r *mutationResolver) 
AddDynamicDatasetSchema(ctx context.Context, input gqlmodel.AddDynamicDatasetSchemaInput) (*gqlmodel.AddDynamicDatasetSchemaPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Dataset.AddDynamicDatasetSchema(ctx, interfaces.AddDynamicDatasetSchemaParam{ + SceneId: sid, + }) + if err != nil { + return nil, err + } + + return &gqlmodel.AddDynamicDatasetSchemaPayload{DatasetSchema: gqlmodel.ToDatasetSchema(res)}, nil +} + +func (r *mutationResolver) AddDynamicDataset(ctx context.Context, input gqlmodel.AddDynamicDatasetInput) (*gqlmodel.AddDynamicDatasetPayload, error) { + dsid, err := gqlmodel.ToID[id.DatasetSchema](input.DatasetSchemaID) + if err != nil { + return nil, err + } + + dss, ds, err := usecases(ctx).Dataset.AddDynamicDataset(ctx, interfaces.AddDynamicDatasetParam{ + SchemaId: dsid, + Author: input.Author, + Content: input.Content, + Lat: input.Lat, + Lng: input.Lng, + Target: input.Target, + }) + if err != nil { + return nil, err + } + + return &gqlmodel.AddDynamicDatasetPayload{DatasetSchema: gqlmodel.ToDatasetSchema(dss), Dataset: gqlmodel.ToDataset(ds)}, nil +} + +func (r *mutationResolver) SyncDataset(ctx context.Context, input gqlmodel.SyncDatasetInput) (*gqlmodel.SyncDatasetPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + dss, ds, err := usecases(ctx).Dataset.Sync(ctx, sid, input.URL, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.SyncDatasetPayload{ + SceneID: input.SceneID, + URL: input.URL, + DatasetSchema: util.Map(dss, gqlmodel.ToDatasetSchema), + Dataset: util.Map(ds, gqlmodel.ToDataset), + }, nil +} + +func (r *mutationResolver) RemoveDatasetSchema(ctx context.Context, input gqlmodel.RemoveDatasetSchemaInput) (*gqlmodel.RemoveDatasetSchemaPayload, error) { + sid, err := gqlmodel.ToID[id.DatasetSchema](input.SchemaID) + if err != nil { + return nil, err + } + 
+ res, err := usecases(ctx).Dataset.RemoveDatasetSchema(ctx, interfaces.RemoveDatasetSchemaParam{ + SchemaID: sid, + Force: input.Force, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.RemoveDatasetSchemaPayload{SchemaID: gqlmodel.IDFrom(res)}, nil +} + +func (r *mutationResolver) AddDatasetSchema(ctx context.Context, input gqlmodel.AddDatasetSchemaInput) (*gqlmodel.AddDatasetSchemaPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Dataset.AddDatasetSchema(ctx, interfaces.AddDatasetSchemaParam{ + SceneId: sid, + Name: input.Name, + RepresentativeField: gqlmodel.ToIDRef[id.DatasetField](input.Representativefield), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.AddDatasetSchemaPayload{DatasetSchema: gqlmodel.ToDatasetSchema(res)}, nil +} + +func (r *mutationResolver) ImportDataset(ctx context.Context, input gqlmodel.ImportDatasetInput) (*gqlmodel.ImportDatasetPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Dataset.ImportDataset(ctx, interfaces.ImportDatasetParam{ + SceneId: sid, + SchemaId: gqlmodel.ToIDRef[id.DatasetSchema](input.DatasetSchemaID), + File: gqlmodel.FromFile(&input.File), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.ImportDatasetPayload{DatasetSchema: gqlmodel.ToDatasetSchema(res)}, nil +} + +func (r *mutationResolver) ImportDatasetFromGoogleSheet(ctx context.Context, input gqlmodel.ImportDatasetFromGoogleSheetInput) (*gqlmodel.ImportDatasetPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Dataset.ImportDatasetFromGoogleSheet(ctx, interfaces.ImportDatasetFromGoogleSheetParam{ + Token: input.AccessToken, + FileID: input.FileID, + SheetName: input.SheetName, + SceneId: sid, 
+ SchemaId: gqlmodel.ToIDRef[id.DatasetSchema](input.DatasetSchemaID), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.ImportDatasetPayload{DatasetSchema: gqlmodel.ToDatasetSchema(res)}, nil +} diff --git a/server/internal/adapter/gql/resolver_mutation_layer.go b/server/internal/adapter/gql/resolver_mutation_layer.go new file mode 100644 index 000000000..358715c75 --- /dev/null +++ b/server/internal/adapter/gql/resolver_mutation_layer.go @@ -0,0 +1,284 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/samber/lo" +) + +func (r *mutationResolver) AddLayerItem(ctx context.Context, input gqlmodel.AddLayerItemInput) (*gqlmodel.AddLayerItemPayload, error) { + lid, err := gqlmodel.ToID[id.Layer](input.ParentLayerID) + if err != nil { + return nil, err + } + + // layers are no longer extendable with plugins + // pid, err := gqlmodel.ToPluginID(input.PluginID) + // if err != nil { + // return nil, err + // } + + layer, parent, err := usecases(ctx).Layer.AddItem(ctx, interfaces.AddLayerItemInput{ + ParentLayerID: lid, + // layers are no longer extendable with plugins + // PluginID: &pid, + ExtensionID: lo.ToPtr(id.PluginExtensionID(input.ExtensionID)), + Index: input.Index, + Name: gqlmodel.RefToString(input.Name), + LatLng: gqlmodel.ToPropertyLatLng(input.Lat, input.Lng), + // LinkedDatasetID: input.LinkedDatasetID, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.AddLayerItemPayload{ + Layer: gqlmodel.ToLayerItem(layer, parent.IDRef()), + ParentLayer: gqlmodel.ToLayerGroup(parent, nil), + Index: input.Index, + }, nil +} + +func (r *mutationResolver) AddLayerGroup(ctx context.Context, input gqlmodel.AddLayerGroupInput) (*gqlmodel.AddLayerGroupPayload, error) { + lid, err := gqlmodel.ToID[id.Layer](input.ParentLayerID) + 
if err != nil { + return nil, err + } + + layer, parent, err := usecases(ctx).Layer.AddGroup(ctx, interfaces.AddLayerGroupInput{ + ParentLayerID: lid, + // layers are no longer extendable with plugins + // PluginID: gqlmodel.ToPluginIDRef(input.PluginID), + ExtensionID: gqlmodel.ToStringIDRef[id.PluginExtension](input.ExtensionID), + Index: input.Index, + Name: gqlmodel.RefToString(input.Name), + LinkedDatasetSchemaID: gqlmodel.ToIDRef[id.DatasetSchema](input.LinkedDatasetSchemaID), + RepresentativeFieldId: gqlmodel.ToIDRef[id.DatasetField](input.RepresentativeFieldID), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.AddLayerGroupPayload{ + Layer: gqlmodel.ToLayerGroup(layer, parent.IDRef()), + ParentLayer: gqlmodel.ToLayerGroup(parent, nil), + Index: input.Index, + }, nil +} + +func (r *mutationResolver) RemoveLayer(ctx context.Context, input gqlmodel.RemoveLayerInput) (*gqlmodel.RemoveLayerPayload, error) { + lid, err := gqlmodel.ToID[id.Layer](input.LayerID) + if err != nil { + return nil, err + } + + id, layer, err := usecases(ctx).Layer.Remove(ctx, lid, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.RemoveLayerPayload{ + LayerID: gqlmodel.IDFrom(id), + ParentLayer: gqlmodel.ToLayerGroup(layer, nil), + }, nil +} + +func (r *mutationResolver) UpdateLayer(ctx context.Context, input gqlmodel.UpdateLayerInput) (*gqlmodel.UpdateLayerPayload, error) { + lid, err := gqlmodel.ToID[id.Layer](input.LayerID) + if err != nil { + return nil, err + } + + layer, err := usecases(ctx).Layer.Update(ctx, interfaces.UpdateLayerInput{ + LayerID: lid, + Name: input.Name, + Visible: input.Visible, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UpdateLayerPayload{ + Layer: gqlmodel.ToLayer(layer, nil), + }, nil +} + +func (r *mutationResolver) MoveLayer(ctx context.Context, input gqlmodel.MoveLayerInput) (*gqlmodel.MoveLayerPayload, error) { + lid, err := 
gqlmodel.ToID[id.Layer](input.LayerID) + if err != nil { + return nil, err + } + + targetLayerID, layerGroupFrom, layerGroupTo, index, err := usecases(ctx).Layer.Move(ctx, interfaces.MoveLayerInput{ + LayerID: lid, + DestLayerID: gqlmodel.ToIDRef[id.Layer](input.DestLayerID), + Index: gqlmodel.RefToIndex(input.Index), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.MoveLayerPayload{ + LayerID: gqlmodel.IDFrom(targetLayerID), + FromParentLayer: gqlmodel.ToLayerGroup(layerGroupFrom, nil), + ToParentLayer: gqlmodel.ToLayerGroup(layerGroupTo, nil), + Index: index, + }, nil +} + +func (r *mutationResolver) CreateInfobox(ctx context.Context, input gqlmodel.CreateInfoboxInput) (*gqlmodel.CreateInfoboxPayload, error) { + lid, err := gqlmodel.ToID[id.Layer](input.LayerID) + if err != nil { + return nil, err + } + + layer, err := usecases(ctx).Layer.CreateInfobox(ctx, lid, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.CreateInfoboxPayload{ + Layer: gqlmodel.ToLayer(layer, nil), + }, nil +} + +func (r *mutationResolver) RemoveInfobox(ctx context.Context, input gqlmodel.RemoveInfoboxInput) (*gqlmodel.RemoveInfoboxPayload, error) { + lid, err := gqlmodel.ToID[id.Layer](input.LayerID) + if err != nil { + return nil, err + } + + layer, err := usecases(ctx).Layer.RemoveInfobox(ctx, lid, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.RemoveInfoboxPayload{ + Layer: gqlmodel.ToLayer(layer, nil), + }, nil +} + +func (r *mutationResolver) AddInfoboxField(ctx context.Context, input gqlmodel.AddInfoboxFieldInput) (*gqlmodel.AddInfoboxFieldPayload, error) { + lid, err := gqlmodel.ToID[id.Layer](input.LayerID) + if err != nil { + return nil, err + } + + pid, err := gqlmodel.ToPluginID(input.PluginID) + if err != nil { + return nil, err + } + + infoboxField, layer, err := usecases(ctx).Layer.AddInfoboxField(ctx, interfaces.AddInfoboxFieldParam{ + LayerID: lid, + PluginID: pid, + ExtensionID: 
id.PluginExtensionID(input.ExtensionID), + Index: input.Index, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.AddInfoboxFieldPayload{ + InfoboxField: gqlmodel.ToInfoboxField(infoboxField, layer.Scene(), nil), + Layer: gqlmodel.ToLayer(layer, nil), + }, nil +} + +func (r *mutationResolver) MoveInfoboxField(ctx context.Context, input gqlmodel.MoveInfoboxFieldInput) (*gqlmodel.MoveInfoboxFieldPayload, error) { + lid, ifid, err := gqlmodel.ToID2[id.Layer, id.InfoboxField](input.LayerID, input.InfoboxFieldID) + if err != nil { + return nil, err + } + + infoboxField, layer, index, err := usecases(ctx).Layer.MoveInfoboxField(ctx, interfaces.MoveInfoboxFieldParam{ + LayerID: lid, + InfoboxFieldID: ifid, + Index: input.Index, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.MoveInfoboxFieldPayload{ + InfoboxFieldID: gqlmodel.IDFrom(infoboxField), + Layer: gqlmodel.ToLayer(layer, nil), + Index: index, + }, nil +} + +func (r *mutationResolver) RemoveInfoboxField(ctx context.Context, input gqlmodel.RemoveInfoboxFieldInput) (*gqlmodel.RemoveInfoboxFieldPayload, error) { + lid, ifid, err := gqlmodel.ToID2[id.Layer, id.InfoboxField](input.LayerID, input.InfoboxFieldID) + if err != nil { + return nil, err + } + + infoboxField, layer, err := usecases(ctx).Layer.RemoveInfoboxField(ctx, interfaces.RemoveInfoboxFieldParam{ + LayerID: lid, + InfoboxFieldID: ifid, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.RemoveInfoboxFieldPayload{ + InfoboxFieldID: gqlmodel.IDFrom(infoboxField), + Layer: gqlmodel.ToLayer(layer, nil), + }, nil +} + +func (r *mutationResolver) ImportLayer(ctx context.Context, input gqlmodel.ImportLayerInput) (*gqlmodel.ImportLayerPayload, error) { + lid, err := gqlmodel.ToID[id.Layer](input.LayerID) + if err != nil { + return nil, err + } + + l, l2, err := usecases(ctx).Layer.ImportLayer(ctx, interfaces.ImportLayerParam{ + LayerID: lid, + File: 
gqlmodel.FromFile(&input.File), + Format: gqlmodel.FromLayerEncodingFormat(input.Format), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.ImportLayerPayload{ + Layers: gqlmodel.ToLayers(l, l2.IDRef()), + ParentLayer: gqlmodel.ToLayerGroup(l2, nil), + }, err +} + +func (r *mutationResolver) AttachTagToLayer(ctx context.Context, input gqlmodel.AttachTagToLayerInput) (*gqlmodel.AttachTagToLayerPayload, error) { + lid, tid, err := gqlmodel.ToID2[id.Layer, id.Tag](input.LayerID, input.TagID) + if err != nil { + return nil, err + } + + layer, err := usecases(ctx).Layer.AttachTag(ctx, lid, tid, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.AttachTagToLayerPayload{ + Layer: gqlmodel.ToLayer(layer, nil), + }, nil +} + +func (r *mutationResolver) DetachTagFromLayer(ctx context.Context, input gqlmodel.DetachTagFromLayerInput) (*gqlmodel.DetachTagFromLayerPayload, error) { + lid, tid, err := gqlmodel.ToID2[id.Layer, id.Tag](input.LayerID, input.TagID) + if err != nil { + return nil, err + } + + layer, err := usecases(ctx).Layer.DetachTag(ctx, lid, tid, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.DetachTagFromLayerPayload{ + Layer: gqlmodel.ToLayer(layer, nil), + }, nil +} diff --git a/server/internal/adapter/gql/resolver_mutation_project.go b/server/internal/adapter/gql/resolver_mutation_project.go new file mode 100644 index 000000000..5fc8b7878 --- /dev/null +++ b/server/internal/adapter/gql/resolver_mutation_project.go @@ -0,0 +1,103 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +func (r *mutationResolver) CreateProject(ctx context.Context, input gqlmodel.CreateProjectInput) (*gqlmodel.ProjectPayload, error) { + tid, err := 
gqlmodel.ToID[id.Team](input.TeamID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Project.Create(ctx, interfaces.CreateProjectParam{ + TeamID: tid, + Visualizer: visualizer.Visualizer(input.Visualizer), + Name: input.Name, + Description: input.Description, + ImageURL: input.ImageURL, + Alias: input.Alias, + Archived: input.Archived, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.ProjectPayload{Project: gqlmodel.ToProject(res)}, nil +} + +func (r *mutationResolver) UpdateProject(ctx context.Context, input gqlmodel.UpdateProjectInput) (*gqlmodel.ProjectPayload, error) { + deletePublicImage := false + if input.DeletePublicImage != nil { + deletePublicImage = *input.DeletePublicImage + } + + deleteImageURL := false + if input.DeleteImageURL != nil { + deleteImageURL = *input.DeleteImageURL + } + + pid, err := gqlmodel.ToID[id.Project](input.ProjectID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Project.Update(ctx, interfaces.UpdateProjectParam{ + ID: pid, + Name: input.Name, + Description: input.Description, + Alias: input.Alias, + ImageURL: input.ImageURL, + Archived: input.Archived, + IsBasicAuthActive: input.IsBasicAuthActive, + BasicAuthUsername: input.BasicAuthUsername, + BasicAuthPassword: input.BasicAuthPassword, + PublicTitle: input.PublicTitle, + PublicDescription: input.PublicDescription, + PublicImage: input.PublicImage, + PublicNoIndex: input.PublicNoIndex, + DeletePublicImage: deletePublicImage, + DeleteImageURL: deleteImageURL, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.ProjectPayload{Project: gqlmodel.ToProject(res)}, nil +} + +func (r *mutationResolver) PublishProject(ctx context.Context, input gqlmodel.PublishProjectInput) (*gqlmodel.ProjectPayload, error) { + pid, err := gqlmodel.ToID[id.Project](input.ProjectID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Project.Publish(ctx, 
interfaces.PublishProjectParam{ + ID: pid, + Alias: input.Alias, + Status: gqlmodel.FromPublishmentStatus(input.Status), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.ProjectPayload{Project: gqlmodel.ToProject(res)}, nil +} + +func (r *mutationResolver) DeleteProject(ctx context.Context, input gqlmodel.DeleteProjectInput) (*gqlmodel.DeleteProjectPayload, error) { + pid, err := gqlmodel.ToID[id.Project](input.ProjectID) + if err != nil { + return nil, err + } + + if err := usecases(ctx).Project.Delete(ctx, pid, getOperator(ctx)); err != nil { + return nil, err + } + + return &gqlmodel.DeleteProjectPayload{ProjectID: input.ProjectID}, nil +} diff --git a/server/internal/adapter/gql/resolver_mutation_property.go b/server/internal/adapter/gql/resolver_mutation_property.go new file mode 100644 index 000000000..e2ba6c9b7 --- /dev/null +++ b/server/internal/adapter/gql/resolver_mutation_property.go @@ -0,0 +1,271 @@ +package gql + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/util" + "github.com/samber/lo" +) + +func (r *mutationResolver) UpdatePropertyValue(ctx context.Context, input gqlmodel.UpdatePropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) { + var v *property.Value + if input.Value != nil { + v = gqlmodel.FromPropertyValueAndType(input.Value, input.Type) + if v == nil { + return nil, errors.New("invalid value") + } + } + + pid, err := gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + + pp, pgl, pg, pf, err := usecases(ctx).Property.UpdateValue(ctx, interfaces.UpdatePropertyValueParam{ + PropertyID: pid, + Pointer: gqlmodel.FromPointer( + gqlmodel.ToStringIDRef[id.PropertySchemaGroup](input.SchemaGroupID), + 
input.ItemID, + gqlmodel.ToStringIDRef[id.PropertyField](&input.FieldID), + ), + Value: v, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.PropertyFieldPayload{ + Property: gqlmodel.ToProperty(pp), + PropertyField: gqlmodel.ToPropertyField(pf, pp, pgl, pg), + }, nil +} + +func (r *mutationResolver) RemovePropertyField(ctx context.Context, input gqlmodel.RemovePropertyFieldInput) (*gqlmodel.PropertyFieldPayload, error) { + pid, err := gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + + p, err := usecases(ctx).Property.RemoveField(ctx, interfaces.RemovePropertyFieldParam{ + PropertyID: pid, + Pointer: gqlmodel.FromPointer( + gqlmodel.ToStringIDRef[id.PropertySchemaGroup](input.SchemaGroupID), + input.ItemID, + gqlmodel.ToStringIDRef[id.PropertyField](&input.FieldID), + ), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.PropertyFieldPayload{ + Property: gqlmodel.ToProperty(p), + }, nil +} + +func (r *mutationResolver) UploadFileToProperty(ctx context.Context, input gqlmodel.UploadFileToPropertyInput) (*gqlmodel.PropertyFieldPayload, error) { + pid, err := gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + + p, pgl, pg, pf, err := usecases(ctx).Property.UploadFile(ctx, interfaces.UploadFileParam{ + PropertyID: pid, + Pointer: gqlmodel.FromPointer( + gqlmodel.ToStringIDRef[id.PropertySchemaGroup](input.SchemaGroupID), + input.ItemID, + gqlmodel.ToStringIDRef[id.PropertyField](&input.FieldID), + ), + File: gqlmodel.FromFile(&input.File), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.PropertyFieldPayload{ + Property: gqlmodel.ToProperty(p), + PropertyField: gqlmodel.ToPropertyField(pf, p, pgl, pg), + }, nil +} + +func (r *mutationResolver) LinkDatasetToPropertyValue(ctx context.Context, input gqlmodel.LinkDatasetToPropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) { + pid, err := 
gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + + links, err := gqlmodel.FromPropertyFieldLink(input.DatasetSchemaIds, input.DatasetIds, input.DatasetSchemaFieldIds) + if err != nil { + return nil, err + } + + p, pgl, pg, pf, err := usecases(ctx).Property.LinkValue(ctx, interfaces.LinkPropertyValueParam{ + PropertyID: pid, + Pointer: gqlmodel.FromPointer( + gqlmodel.ToStringIDRef[id.PropertySchemaGroup](input.SchemaGroupID), + input.ItemID, + lo.ToPtr(id.PropertyFieldID(input.FieldID)), + ), + Links: links, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.PropertyFieldPayload{ + Property: gqlmodel.ToProperty(p), + PropertyField: gqlmodel.ToPropertyField(pf, p, pgl, pg), + }, nil +} + +func (r *mutationResolver) UnlinkPropertyValue(ctx context.Context, input gqlmodel.UnlinkPropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) { + pid, err := gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + + p, pgl, pg, pf, err := usecases(ctx).Property.UnlinkValue(ctx, interfaces.UnlinkPropertyValueParam{ + PropertyID: pid, + Pointer: gqlmodel.FromPointer( + gqlmodel.ToStringIDRef[id.PropertySchemaGroup](input.SchemaGroupID), + input.ItemID, + lo.ToPtr(id.PropertyFieldID(input.FieldID)), + ), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.PropertyFieldPayload{ + Property: gqlmodel.ToProperty(p), + PropertyField: gqlmodel.ToPropertyField(pf, p, pgl, pg), + }, nil +} + +func (r *mutationResolver) AddPropertyItem(ctx context.Context, input gqlmodel.AddPropertyItemInput) (*gqlmodel.PropertyItemPayload, error) { + pid, err := gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + + var v *property.Value + if input.NameFieldType != nil { + v = gqlmodel.FromPropertyValueAndType(input.NameFieldValue, *input.NameFieldType) + if v == nil { + return nil, errors.New("invalid name field value") + } + } + + p, 
pgl, pi, err := usecases(ctx).Property.AddItem(ctx, interfaces.AddPropertyItemParam{ + PropertyID: pid, + Pointer: gqlmodel.FromPointer(gqlmodel.ToStringIDRef[id.PropertySchemaGroup](&input.SchemaGroupID), nil, nil), + Index: input.Index, + NameFieldValue: v, + }, getOperator(ctx)) + + if err != nil { + return nil, err + } + + return &gqlmodel.PropertyItemPayload{ + Property: gqlmodel.ToProperty(p), + PropertyItem: gqlmodel.ToPropertyItem(pi, p, pgl), + }, nil +} + +func (r *mutationResolver) MovePropertyItem(ctx context.Context, input gqlmodel.MovePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) { + pid, err := gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + + p, pgl, pi, err := usecases(ctx).Property.MoveItem(ctx, interfaces.MovePropertyItemParam{ + PropertyID: pid, + Pointer: gqlmodel.FromPointer( + lo.ToPtr(id.PropertySchemaGroupID(input.SchemaGroupID)), + &input.ItemID, + nil, + ), + Index: input.Index, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.PropertyItemPayload{ + Property: gqlmodel.ToProperty(p), + PropertyItem: gqlmodel.ToPropertyItem(pi, p, pgl), + }, nil +} + +func (r *mutationResolver) RemovePropertyItem(ctx context.Context, input gqlmodel.RemovePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) { + pid, err := gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + + p, err := usecases(ctx).Property.RemoveItem(ctx, interfaces.RemovePropertyItemParam{ + PropertyID: pid, + Pointer: gqlmodel.FromPointer( + lo.ToPtr(id.PropertySchemaGroupID(input.SchemaGroupID)), + &input.ItemID, + nil, + ), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.PropertyItemPayload{ + Property: gqlmodel.ToProperty(p), + }, nil +} + +func (r *mutationResolver) UpdatePropertyItems(ctx context.Context, input gqlmodel.UpdatePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) { + pid, err := 
gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + + operations, err := util.TryMap(input.Operations, func(o *gqlmodel.UpdatePropertyItemOperationInput) (interfaces.UpdatePropertyItemsOperationParam, error) { + var v *property.Value + if o.NameFieldType != nil { + v = gqlmodel.FromPropertyValueAndType(o.NameFieldValue, *o.NameFieldType) + if v == nil { + return interfaces.UpdatePropertyItemsOperationParam{}, errors.New("invalid name field value") + } + } + + return interfaces.UpdatePropertyItemsOperationParam{ + Operation: gqlmodel.FromListOperation(o.Operation), + ItemID: gqlmodel.ToIDRef[id.PropertyItem](o.ItemID), + Index: o.Index, + NameFieldValue: v, + }, nil + }) + if err != nil { + return nil, err + } + + p, err2 := usecases(ctx).Property.UpdateItems(ctx, interfaces.UpdatePropertyItemsParam{ + PropertyID: pid, + Pointer: gqlmodel.FromPointer( + lo.ToPtr(id.PropertySchemaGroupID(input.SchemaGroupID)), + nil, + nil, + ), + Operations: operations, + }, getOperator(ctx)) + if err2 != nil { + return nil, err2 + } + + return &gqlmodel.PropertyItemPayload{ + Property: gqlmodel.ToProperty(p), + }, nil +} diff --git a/server/internal/adapter/gql/resolver_mutation_scene.go b/server/internal/adapter/gql/resolver_mutation_scene.go new file mode 100644 index 000000000..2578ea7d6 --- /dev/null +++ b/server/internal/adapter/gql/resolver_mutation_scene.go @@ -0,0 +1,278 @@ +package gql + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/scene" +) + +func (r *mutationResolver) CreateScene(ctx context.Context, input gqlmodel.CreateSceneInput) (*gqlmodel.CreateScenePayload, error) { + pid, err := gqlmodel.ToID[id.Project](input.ProjectID) + if err != nil { + return nil, err + } + + res, 
err := usecases(ctx).Scene.Create(ctx, pid, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.CreateScenePayload{ + Scene: gqlmodel.ToScene(res), + }, nil +} + +func (r *mutationResolver) AddWidget(ctx context.Context, input gqlmodel.AddWidgetInput) (*gqlmodel.AddWidgetPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + pid, err := gqlmodel.ToPluginID(input.PluginID) + if err != nil { + return nil, err + } + + scene, widget, err := usecases(ctx).Scene.AddWidget( + ctx, + sid, + pid, + id.PluginExtensionID(input.ExtensionID), + getOperator(ctx), + ) + if err != nil { + return nil, err + } + + return &gqlmodel.AddWidgetPayload{ + Scene: gqlmodel.ToScene(scene), + SceneWidget: gqlmodel.ToSceneWidget(widget), + }, nil +} + +func (r *mutationResolver) UpdateWidget(ctx context.Context, input gqlmodel.UpdateWidgetInput) (*gqlmodel.UpdateWidgetPayload, error) { + sid, wid, err := gqlmodel.ToID2[id.Scene, id.Widget](input.SceneID, input.WidgetID) + if err != nil { + return nil, err + } + + scene, widget, err := usecases(ctx).Scene.UpdateWidget(ctx, interfaces.UpdateWidgetParam{ + SceneID: sid, + WidgetID: wid, + Enabled: input.Enabled, + Extended: input.Extended, + Location: gqlmodel.FromSceneWidgetLocation(input.Location), + Index: input.Index, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UpdateWidgetPayload{ + Scene: gqlmodel.ToScene(scene), + SceneWidget: gqlmodel.ToSceneWidget(widget), + }, nil +} + +func (r *mutationResolver) RemoveWidget(ctx context.Context, input gqlmodel.RemoveWidgetInput) (*gqlmodel.RemoveWidgetPayload, error) { + sid, wid, err := gqlmodel.ToID2[id.Scene, id.Widget](input.SceneID, input.WidgetID) + if err != nil { + return nil, err + } + + scene, err := usecases(ctx).Scene.RemoveWidget(ctx, + sid, + wid, + getOperator(ctx), + ) + if err != nil { + return nil, err + } + + return &gqlmodel.RemoveWidgetPayload{ + Scene: 
gqlmodel.ToScene(scene), + WidgetID: input.WidgetID, + }, nil +} + +func (r *mutationResolver) UpdateWidgetAlignSystem(ctx context.Context, input gqlmodel.UpdateWidgetAlignSystemInput) (*gqlmodel.UpdateWidgetAlignSystemPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + scene, err := usecases(ctx).Scene.UpdateWidgetAlignSystem(ctx, interfaces.UpdateWidgetAlignSystemParam{ + SceneID: sid, + Location: *gqlmodel.FromSceneWidgetLocation(input.Location), + Align: gqlmodel.FromWidgetAlignType(input.Align), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UpdateWidgetAlignSystemPayload{ + Scene: gqlmodel.ToScene(scene), + }, nil +} + +func (r *mutationResolver) InstallPlugin(ctx context.Context, input gqlmodel.InstallPluginInput) (*gqlmodel.InstallPluginPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + pid, err := gqlmodel.ToPluginID(input.PluginID) + if err != nil { + return nil, err + } + + scene, pr, err := usecases(ctx).Scene.InstallPlugin(ctx, sid, pid, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.InstallPluginPayload{ + Scene: gqlmodel.ToScene(scene), ScenePlugin: &gqlmodel.ScenePlugin{ + PluginID: input.PluginID, + PropertyID: gqlmodel.IDFromRef(pr), + }, + }, nil +} + +func (r *mutationResolver) UploadPlugin(ctx context.Context, input gqlmodel.UploadPluginInput) (*gqlmodel.UploadPluginPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + operator := getOperator(ctx) + var p *plugin.Plugin + var s *scene.Scene + + if input.File != nil { + p, s, err = usecases(ctx).Plugin.Upload(ctx, input.File.File, sid, operator) + } else if input.URL != nil { + p, s, err = usecases(ctx).Plugin.UploadFromRemote(ctx, input.URL, sid, operator) + } else { + return nil, errors.New("either file or url is required") + } + if err != 
nil { + return nil, err + } + + return &gqlmodel.UploadPluginPayload{ + Plugin: gqlmodel.ToPlugin(p), + Scene: gqlmodel.ToScene(s), + ScenePlugin: gqlmodel.ToScenePlugin(s.Plugins().Plugin(p.ID())), + }, nil +} + +func (r *mutationResolver) UninstallPlugin(ctx context.Context, input gqlmodel.UninstallPluginInput) (*gqlmodel.UninstallPluginPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + pid, err := gqlmodel.ToPluginID(input.PluginID) + if err != nil { + return nil, err + } + + scene, err := usecases(ctx).Scene.UninstallPlugin(ctx, sid, pid, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UninstallPluginPayload{ + PluginID: input.PluginID, + Scene: gqlmodel.ToScene(scene), + }, nil +} + +func (r *mutationResolver) UpgradePlugin(ctx context.Context, input gqlmodel.UpgradePluginInput) (*gqlmodel.UpgradePluginPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + pid, topid, err := gqlmodel.ToPluginID2(input.PluginID, input.ToPluginID) + if err != nil { + return nil, err + } + + s, err := usecases(ctx).Scene.UpgradePlugin(ctx, + sid, + pid, + topid, + getOperator(ctx), + ) + if err != nil { + return nil, err + } + + return &gqlmodel.UpgradePluginPayload{ + Scene: gqlmodel.ToScene(s), + ScenePlugin: gqlmodel.ToScenePlugin(s.Plugins().Plugin(topid)), + }, nil +} + +func (r *mutationResolver) AddCluster(ctx context.Context, input gqlmodel.AddClusterInput) (*gqlmodel.AddClusterPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + s, c, err := usecases(ctx).Scene.AddCluster(ctx, sid, input.Name, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.AddClusterPayload{ + Scene: gqlmodel.ToScene(s), + Cluster: gqlmodel.ToCluster(c), + }, nil +} + +func (r *mutationResolver) UpdateCluster(ctx context.Context, input 
gqlmodel.UpdateClusterInput) (*gqlmodel.UpdateClusterPayload, error) { + sid, cid, err := gqlmodel.ToID2[id.Scene, id.Cluster](input.SceneID, input.ClusterID) + if err != nil { + return nil, err + } + + s, c, err := usecases(ctx).Scene.UpdateCluster(ctx, interfaces.UpdateClusterParam{ + ClusterID: cid, + SceneID: sid, + Name: input.Name, + PropertyID: gqlmodel.ToIDRef[id.Property](input.PropertyID), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UpdateClusterPayload{ + Scene: gqlmodel.ToScene(s), + Cluster: gqlmodel.ToCluster(c), + }, nil +} + +func (r *mutationResolver) RemoveCluster(ctx context.Context, input gqlmodel.RemoveClusterInput) (*gqlmodel.RemoveClusterPayload, error) { + sid, cid, err := gqlmodel.ToID2[id.Scene, id.Cluster](input.SceneID, input.ClusterID) + if err != nil { + return nil, err + } + + s, err := usecases(ctx).Scene.RemoveCluster(ctx, sid, cid, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.RemoveClusterPayload{ + Scene: gqlmodel.ToScene(s), + ClusterID: input.ClusterID, + }, nil +} diff --git a/server/internal/adapter/gql/resolver_mutation_tag.go b/server/internal/adapter/gql/resolver_mutation_tag.go new file mode 100644 index 000000000..dcfdaceed --- /dev/null +++ b/server/internal/adapter/gql/resolver_mutation_tag.go @@ -0,0 +1,138 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" +) + +func (r *mutationResolver) CreateTagItem(ctx context.Context, input gqlmodel.CreateTagItemInput) (*gqlmodel.CreateTagItemPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + tag, parent, err := usecases(ctx).Tag.CreateItem(ctx, interfaces.CreateTagItemParam{ + Label: input.Label, + SceneID: sid, + Parent: 
gqlmodel.ToIDRef[id.Tag](input.Parent), + LinkedDatasetSchemaID: gqlmodel.ToIDRef[id.DatasetSchema](input.LinkedDatasetSchemaID), + LinkedDatasetID: gqlmodel.ToIDRef[id.Dataset](input.LinkedDatasetID), + LinkedDatasetField: gqlmodel.ToIDRef[id.DatasetField](input.LinkedDatasetField), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.CreateTagItemPayload{ + Tag: gqlmodel.ToTagItem(tag), + Parent: gqlmodel.ToTagGroup(parent), + }, nil +} + +func (r *mutationResolver) CreateTagGroup(ctx context.Context, input gqlmodel.CreateTagGroupInput) (*gqlmodel.CreateTagGroupPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + tags, err := util.TryMap(input.Tags, gqlmodel.ToID[id.Tag]) + if err != nil { + return nil, err + } + + tag, err := usecases(ctx).Tag.CreateGroup(ctx, interfaces.CreateTagGroupParam{ + Label: input.Label, + SceneID: sid, + Tags: tags, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + return &gqlmodel.CreateTagGroupPayload{ + Tag: gqlmodel.ToTagGroup(tag), + }, nil +} + +func (r *mutationResolver) UpdateTag(ctx context.Context, input gqlmodel.UpdateTagInput) (*gqlmodel.UpdateTagPayload, error) { + tid, err := gqlmodel.ToID[id.Tag](input.TagID) + if err != nil { + return nil, err + } + + tag, err := usecases(ctx).Tag.UpdateTag(ctx, interfaces.UpdateTagParam{ + Label: input.Label, + TagID: tid, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + return &gqlmodel.UpdateTagPayload{ + Tag: gqlmodel.ToTag(*tag), + }, nil +} + +func (r *mutationResolver) AttachTagItemToGroup(ctx context.Context, input gqlmodel.AttachTagItemToGroupInput) (*gqlmodel.AttachTagItemToGroupPayload, error) { + iid, gid, err := gqlmodel.ToID2[id.Tag, id.Tag](input.ItemID, input.GroupID) + if err != nil { + return nil, err + } + + tag, err := usecases(ctx).Tag.AttachItemToGroup(ctx, interfaces.AttachItemToGroupParam{ + ItemID: iid, + GroupID: gid, + }, 
getOperator(ctx)) + if err != nil { + return nil, err + } + return &gqlmodel.AttachTagItemToGroupPayload{ + Tag: gqlmodel.ToTagGroup(tag), + }, nil +} + +func (r *mutationResolver) DetachTagItemFromGroup(ctx context.Context, input gqlmodel.DetachTagItemFromGroupInput) (*gqlmodel.DetachTagItemFromGroupPayload, error) { + iid, gid, err := gqlmodel.ToID2[id.Tag, id.Tag](input.ItemID, input.GroupID) + if err != nil { + return nil, err + } + + tag, err := usecases(ctx).Tag.DetachItemFromGroup(ctx, interfaces.DetachItemToGroupParam{ + ItemID: iid, + GroupID: gid, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + return &gqlmodel.DetachTagItemFromGroupPayload{ + Tag: gqlmodel.ToTagGroup(tag), + }, nil +} + +func (r *mutationResolver) RemoveTag(ctx context.Context, input gqlmodel.RemoveTagInput) (*gqlmodel.RemoveTagPayload, error) { + tid, err := gqlmodel.ToID[id.Tag](input.TagID) + if err != nil { + return nil, err + } + + _, layers, err := usecases(ctx).Tag.Remove(ctx, tid, getOperator(ctx)) + if err != nil { + return nil, err + } + + updatedLayers := make([]gqlmodel.Layer, 0, len(layers)) + for _, l := range layers { + if l == nil { + updatedLayers = append(updatedLayers, nil) + } else { + updatedLayers = append(updatedLayers, gqlmodel.ToLayer(*l, nil)) + } + } + + return &gqlmodel.RemoveTagPayload{ + TagID: input.TagID, + UpdatedLayers: updatedLayers, + }, nil +} diff --git a/server/internal/adapter/gql/resolver_mutation_team.go b/server/internal/adapter/gql/resolver_mutation_team.go new file mode 100644 index 000000000..46e78e035 --- /dev/null +++ b/server/internal/adapter/gql/resolver_mutation_team.go @@ -0,0 +1,86 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *mutationResolver) CreateTeam(ctx context.Context, input gqlmodel.CreateTeamInput) (*gqlmodel.CreateTeamPayload, error) { + res, err := usecases(ctx).Team.Create(ctx, 
input.Name, getUser(ctx).ID(), getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.CreateTeamPayload{Team: gqlmodel.ToTeam(res)}, nil +} + +func (r *mutationResolver) DeleteTeam(ctx context.Context, input gqlmodel.DeleteTeamInput) (*gqlmodel.DeleteTeamPayload, error) { + tid, err := gqlmodel.ToID[id.Team](input.TeamID) + if err != nil { + return nil, err + } + + if err := usecases(ctx).Team.Remove(ctx, tid, getOperator(ctx)); err != nil { + return nil, err + } + + return &gqlmodel.DeleteTeamPayload{TeamID: input.TeamID}, nil +} + +func (r *mutationResolver) UpdateTeam(ctx context.Context, input gqlmodel.UpdateTeamInput) (*gqlmodel.UpdateTeamPayload, error) { + tid, err := gqlmodel.ToID[id.Team](input.TeamID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Team.Update(ctx, tid, input.Name, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UpdateTeamPayload{Team: gqlmodel.ToTeam(res)}, nil +} + +func (r *mutationResolver) AddMemberToTeam(ctx context.Context, input gqlmodel.AddMemberToTeamInput) (*gqlmodel.AddMemberToTeamPayload, error) { + tid, uid, err := gqlmodel.ToID2[id.Team, id.User](input.TeamID, input.UserID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Team.AddMember(ctx, tid, uid, gqlmodel.FromRole(input.Role), getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.AddMemberToTeamPayload{Team: gqlmodel.ToTeam(res)}, nil +} + +func (r *mutationResolver) RemoveMemberFromTeam(ctx context.Context, input gqlmodel.RemoveMemberFromTeamInput) (*gqlmodel.RemoveMemberFromTeamPayload, error) { + tid, uid, err := gqlmodel.ToID2[id.Team, id.User](input.TeamID, input.UserID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Team.RemoveMember(ctx, tid, uid, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.RemoveMemberFromTeamPayload{Team: gqlmodel.ToTeam(res)}, nil +} + +func (r *mutationResolver) 
UpdateMemberOfTeam(ctx context.Context, input gqlmodel.UpdateMemberOfTeamInput) (*gqlmodel.UpdateMemberOfTeamPayload, error) { + tid, uid, err := gqlmodel.ToID2[id.Team, id.User](input.TeamID, input.UserID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Team.UpdateMember(ctx, tid, uid, gqlmodel.FromRole(input.Role), getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UpdateMemberOfTeamPayload{Team: gqlmodel.ToTeam(res)}, nil +} diff --git a/server/internal/adapter/gql/resolver_mutation_user.go b/server/internal/adapter/gql/resolver_mutation_user.go new file mode 100644 index 000000000..ce70025f0 --- /dev/null +++ b/server/internal/adapter/gql/resolver_mutation_user.go @@ -0,0 +1,75 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *mutationResolver) Signup(ctx context.Context, input gqlmodel.SignupInput) (*gqlmodel.SignupPayload, error) { + au := adapter.GetAuthInfo(ctx) + if au == nil { + return nil, interfaces.ErrOperationDenied + } + + u, t, err := usecases(ctx).User.SignupOIDC(ctx, interfaces.SignupOIDCParam{ + Sub: au.Sub, + AccessToken: au.Token, + Issuer: au.Iss, + Email: au.Email, + Name: au.Name, + Secret: input.Secret, + User: interfaces.SignupUserParam{ + Lang: input.Lang, + Theme: gqlmodel.ToTheme(input.Theme), + UserID: gqlmodel.ToIDRef[id.User](input.UserID), + TeamID: gqlmodel.ToIDRef[id.Team](input.TeamID), + }, + }) + if err != nil { + return nil, err + } + + return &gqlmodel.SignupPayload{User: gqlmodel.ToUser(u), Team: gqlmodel.ToTeam(t)}, nil +} + +func (r *mutationResolver) UpdateMe(ctx context.Context, input gqlmodel.UpdateMeInput) (*gqlmodel.UpdateMePayload, error) { + res, err := usecases(ctx).User.UpdateMe(ctx, interfaces.UpdateMeParam{ + Name: 
input.Name, + Email: input.Email, + Lang: input.Lang, + Theme: gqlmodel.ToTheme(input.Theme), + Password: input.Password, + PasswordConfirmation: input.PasswordConfirmation, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UpdateMePayload{Me: gqlmodel.ToMe(res)}, nil +} + +func (r *mutationResolver) RemoveMyAuth(ctx context.Context, input gqlmodel.RemoveMyAuthInput) (*gqlmodel.UpdateMePayload, error) { + res, err := usecases(ctx).User.RemoveMyAuth(ctx, input.Auth, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UpdateMePayload{Me: gqlmodel.ToMe(res)}, nil +} + +func (r *mutationResolver) DeleteMe(ctx context.Context, input gqlmodel.DeleteMeInput) (*gqlmodel.DeleteMePayload, error) { + uid, err := gqlmodel.ToID[id.User](input.UserID) + if err != nil { + return nil, err + } + + if err := usecases(ctx).User.DeleteMe(ctx, uid, getOperator(ctx)); err != nil { + return nil, err + } + + return &gqlmodel.DeleteMePayload{UserID: input.UserID}, nil +} diff --git a/server/internal/adapter/gql/resolver_plugin.go b/server/internal/adapter/gql/resolver_plugin.go new file mode 100644 index 000000000..c46945134 --- /dev/null +++ b/server/internal/adapter/gql/resolver_plugin.go @@ -0,0 +1,86 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "golang.org/x/text/language" +) + +func (r *Resolver) Plugin() PluginResolver { + return &pluginResolver{r} +} + +func (r *Resolver) PluginExtension() PluginExtensionResolver { + return &pluginExtensionResolver{r} +} + +type pluginResolver struct{ *Resolver } + +func (r *pluginResolver) PropertySchema(ctx context.Context, obj *gqlmodel.Plugin) (*gqlmodel.PropertySchema, error) { + if obj.PropertySchemaID == nil { + return nil, nil + } + return dataloaders(ctx).PropertySchema.Load(*obj.PropertySchemaID) +} + +func (r *pluginResolver) Scene(ctx context.Context, obj *gqlmodel.Plugin) (*gqlmodel.Scene, error) { + if 
obj.SceneID == nil { +		return nil, nil +	} +	return dataloaders(ctx).Scene.Load(*obj.SceneID) +} + +func (r *pluginResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Plugin, sceneID *gqlmodel.ID) (*gqlmodel.ScenePlugin, error) { +	if sceneID == nil && obj.SceneID != nil { +		sceneID = obj.SceneID +	} +	if sceneID == nil { +		return nil, nil +	} +	// Check the load error before touching s: on failure s may be nil and +	// s.Plugin would dereference it. +	s, err := dataloaders(ctx).Scene.Load(*sceneID) +	if err != nil { +		return nil, err +	} +	return s.Plugin(obj.ID), nil +} + +func (r *pluginResolver) TranslatedName(ctx context.Context, obj *gqlmodel.Plugin, lang *language.Tag) (string, error) { +	if s, ok := obj.AllTranslatedName[getLang(ctx, lang)]; ok { +		return s, nil +	} +	return obj.Name, nil +} + +func (r *pluginResolver) TranslatedDescription(ctx context.Context, obj *gqlmodel.Plugin, lang *language.Tag) (string, error) { +	if s, ok := obj.AllTranslatedDescription[getLang(ctx, lang)]; ok { +		return s, nil +	} +	return obj.Description, nil +} + +type pluginExtensionResolver struct{ *Resolver } + +func (r *pluginExtensionResolver) Plugin(ctx context.Context, obj *gqlmodel.PluginExtension) (*gqlmodel.Plugin, error) { +	return dataloaders(ctx).Plugin.Load(obj.PluginID) +} + +func (r *pluginExtensionResolver) PropertySchema(ctx context.Context, obj *gqlmodel.PluginExtension) (*gqlmodel.PropertySchema, error) { +	return dataloaders(ctx).PropertySchema.Load(obj.PropertySchemaID) +} + +func (r *pluginExtensionResolver) SceneWidget(ctx context.Context, obj *gqlmodel.PluginExtension, sceneID gqlmodel.ID) (*gqlmodel.SceneWidget, error) { +	// Same guard as ScenePlugin: never call s.Widget on a nil scene. +	s, err := dataloaders(ctx).Scene.Load(sceneID) +	if err != nil { +		return nil, err +	} +	return s.Widget(obj.PluginID, obj.ExtensionID), nil +} + +func (r *pluginExtensionResolver) TranslatedName(ctx context.Context, obj *gqlmodel.PluginExtension, lang *language.Tag) (string, error) { +	if s, ok := obj.AllTranslatedName[getLang(ctx, lang)]; ok { +		return s, nil +	} +	return obj.Name, nil +} + +func (r *pluginExtensionResolver) TranslatedDescription(ctx context.Context, obj *gqlmodel.PluginExtension, lang
*language.Tag) (string, error) { + if s, ok := obj.AllTranslatedDescription[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Description, nil +} diff --git a/server/internal/adapter/gql/resolver_project.go b/server/internal/adapter/gql/resolver_project.go new file mode 100644 index 000000000..6aa38add1 --- /dev/null +++ b/server/internal/adapter/gql/resolver_project.go @@ -0,0 +1,26 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +func (r *Resolver) Project() ProjectResolver { + return &projectResolver{r} +} + +type projectResolver struct{ *Resolver } + +func (r *projectResolver) Team(ctx context.Context, obj *gqlmodel.Project) (*gqlmodel.Team, error) { + return dataloaders(ctx).Team.Load(obj.TeamID) +} + +func (r *projectResolver) Scene(ctx context.Context, obj *gqlmodel.Project) (*gqlmodel.Scene, error) { + s, err := loaders(ctx).Scene.FindByProject(ctx, obj.ID) + if err != nil && err != rerror.ErrNotFound { + return nil, err + } + return s, nil +} diff --git a/server/internal/adapter/gql/resolver_property.go b/server/internal/adapter/gql/resolver_property.go new file mode 100644 index 000000000..26a539bc5 --- /dev/null +++ b/server/internal/adapter/gql/resolver_property.go @@ -0,0 +1,341 @@ +package gql + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +func (r *Resolver) Property() PropertyResolver { + return &propertyResolver{r} +} + +func (r *Resolver) PropertyField() PropertyFieldResolver { + return &propertyFieldResolver{r} +} + +func (r *Resolver) PropertyFieldLink() PropertyFieldLinkResolver { + return &propertyFieldLinkResolver{r} +} + +func (r *Resolver) MergedProperty() MergedPropertyResolver { + return &mergedPropertyResolver{r} +} + +func (r *Resolver) MergedPropertyGroup() MergedPropertyGroupResolver { + return 
&mergedPropertyGroupResolver{r} +} + +func (r *Resolver) MergedPropertyField() MergedPropertyFieldResolver { +	return &mergedPropertyFieldResolver{r} +} + +func (r *Resolver) PropertyGroupList() PropertyGroupListResolver { +	return &propertyGroupListResolver{r} +} + +func (r *Resolver) PropertyGroup() PropertyGroupResolver { +	return &propertyGroupResolver{r} +} + +type propertyResolver struct{ *Resolver } + +func (r *propertyResolver) Schema(ctx context.Context, obj *gqlmodel.Property) (*gqlmodel.PropertySchema, error) { +	return dataloaders(ctx).PropertySchema.Load(obj.SchemaID) +} + +func (r *propertyResolver) Layer(ctx context.Context, obj *gqlmodel.Property) (gqlmodel.Layer, error) { +	l, err := loaders(ctx).Layer.FetchByProperty(ctx, obj.ID) +	// Not-found just means the property is not attached to a layer; any +	// other error must propagate (matches Merged below). The previous +	// condition "err != nil || errors.Is(err, ...)" swallowed ALL errors. +	if err != nil { +		if errors.Is(err, rerror.ErrNotFound) { +			return nil, nil +		} +		return nil, err +	} +	return l, nil +} + +func (r *propertyResolver) Merged(ctx context.Context, obj *gqlmodel.Property) (*gqlmodel.MergedProperty, error) { +	l, err := loaders(ctx).Layer.FetchByProperty(ctx, obj.ID) +	if err != nil { +		if errors.Is(err, rerror.ErrNotFound) { +			return nil, nil +		} +		return nil, err +	} +	li, ok := l.(*gqlmodel.LayerItem) +	if !ok { +		return nil, nil +	} +	merged, err := r.LayerItem().Merged(ctx, li) +	if err != nil { +		return nil, err +	} +	if merged == nil { +		return nil, nil +	} +	if merged.Property != nil && merged.Property.OriginalID != nil && *merged.Property.OriginalID == obj.ID { +		return merged.Property, nil +	} else if merged.Infobox != nil && merged.Infobox.Property != nil && merged.Infobox.Property.OriginalID != nil && *merged.Infobox.Property.OriginalID == obj.ID { +		return merged.Infobox.Property, nil +	} +	return nil, nil +} + +type propertyFieldResolver struct{ *Resolver } + +func (r *propertyFieldResolver) Parent(ctx context.Context, obj *gqlmodel.PropertyField) (*gqlmodel.Property, error) { +	return dataloaders(ctx).Property.Load(obj.ParentID) +} + +func (r *propertyFieldResolver) Schema(ctx context.Context, obj
*gqlmodel.PropertyField) (*gqlmodel.PropertySchema, error) { + return dataloaders(ctx).PropertySchema.Load(obj.SchemaID) +} + +func (r *propertyFieldResolver) Field(ctx context.Context, obj *gqlmodel.PropertyField) (*gqlmodel.PropertySchemaField, error) { + schema, err := dataloaders(ctx).PropertySchema.Load(obj.SchemaID) + if err != nil { + return nil, err + } + return schema.Field(obj.FieldID), nil +} + +func (r *propertyFieldResolver) ActualValue(ctx context.Context, obj *gqlmodel.PropertyField) (interface{}, error) { + datasetLoader := dataloaders(ctx).Dataset + return actualValue(datasetLoader, obj.Value, obj.Links, false) +} + +type propertyFieldLinkResolver struct{ *Resolver } + +func (r *propertyFieldLinkResolver) Dataset(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.Dataset, error) { + if obj.DatasetID == nil { + return nil, nil + } + return dataloaders(ctx).Dataset.Load(*obj.DatasetID) +} + +func (r *propertyFieldLinkResolver) DatasetField(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetField, error) { + if obj.DatasetID == nil { + return nil, nil + } + d, err := dataloaders(ctx).Dataset.Load(*obj.DatasetID) + if err != nil { + return nil, err + } + return d.Field(obj.DatasetSchemaFieldID), nil +} + +func (r *propertyFieldLinkResolver) DatasetSchema(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetSchema, error) { + return dataloaders(ctx).DatasetSchema.Load(obj.DatasetSchemaID) +} + +func (r *propertyFieldLinkResolver) DatasetSchemaField(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetSchemaField, error) { + ds, err := dataloaders(ctx).DatasetSchema.Load(obj.DatasetSchemaID) + return ds.Field(obj.DatasetSchemaFieldID), err +} + +type mergedPropertyResolver struct{ *Resolver } + +func (r *mergedPropertyResolver) Original(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.Property, error) { + if obj.OriginalID == nil { + return nil, nil + } + return 
dataloaders(ctx).Property.Load(*obj.OriginalID) +} + +func (r *mergedPropertyResolver) Parent(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.Property, error) { + if obj.ParentID == nil { + return nil, nil + } + return dataloaders(ctx).Property.Load(*obj.ParentID) +} + +func (r *mergedPropertyResolver) Schema(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.PropertySchema, error) { + if obj.SchemaID == nil { + if propertyID := obj.PropertyID(); propertyID != nil { + property, err := dataloaders(ctx).Property.Load(*propertyID) + if err != nil { + return nil, err + } + if property == nil { + return nil, nil + } + return dataloaders(ctx).PropertySchema.Load(property.SchemaID) + } + return nil, nil + } + return dataloaders(ctx).PropertySchema.Load(*obj.SchemaID) +} + +func (r *mergedPropertyResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.Dataset, error) { + if obj.LinkedDatasetID == nil { + return nil, nil + } + return dataloaders(ctx).Dataset.Load(*obj.LinkedDatasetID) +} + +func (r *mergedPropertyResolver) Groups(ctx context.Context, obj *gqlmodel.MergedProperty) ([]*gqlmodel.MergedPropertyGroup, error) { + if obj.Groups != nil { + return obj.Groups, nil + } + m, err := loaders(ctx).Property.FetchMerged(ctx, obj.OriginalID, obj.ParentID, obj.LinkedDatasetID) + if err != nil || m == nil { + return nil, err + } + return m.Groups, nil +} + +type mergedPropertyGroupResolver struct{ *Resolver } + +func (r *mergedPropertyGroupResolver) Original(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.PropertyGroup, error) { + if obj.OriginalID == nil || obj.OriginalPropertyID == nil { + return nil, nil + } + p, err := dataloaders(ctx).Property.Load(*obj.OriginalID) + if err != nil { + return nil, err + } + if i, ok := p.Item(*obj.OriginalID).(*gqlmodel.PropertyGroup); ok { + return i, nil + } + return nil, nil +} + +func (r *mergedPropertyGroupResolver) Parent(ctx context.Context, obj 
*gqlmodel.MergedPropertyGroup) (*gqlmodel.PropertyGroup, error) { + if obj.ParentID == nil || obj.ParentPropertyID == nil { + return nil, nil + } + p, err := dataloaders(ctx).Property.Load(*obj.ParentID) + if err != nil { + return nil, err + } + if i, ok := p.Item(*obj.ParentID).(*gqlmodel.PropertyGroup); ok { + return i, nil + } + return nil, nil +} + +func (r *mergedPropertyGroupResolver) OriginalProperty(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.Property, error) { + if obj.OriginalID == nil { + return nil, nil + } + return dataloaders(ctx).Property.Load(*obj.OriginalID) +} + +func (r *mergedPropertyGroupResolver) ParentProperty(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.Property, error) { + if obj.ParentID == nil { + return nil, nil + } + return dataloaders(ctx).Property.Load(*obj.ParentID) +} + +func (r *mergedPropertyGroupResolver) Schema(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.PropertySchema, error) { + if obj.SchemaID == nil { + if propertyID := obj.PropertyID(); propertyID != nil { + property, err := dataloaders(ctx).Property.Load(*propertyID) + if err != nil { + return nil, err + } + if property == nil { + return nil, nil + } + return dataloaders(ctx).PropertySchema.Load(property.SchemaID) + } + return nil, nil + } + return dataloaders(ctx).PropertySchema.Load(*obj.SchemaID) +} + +func (r *mergedPropertyGroupResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.Dataset, error) { + if obj.LinkedDatasetID == nil { + return nil, nil + } + return dataloaders(ctx).Dataset.Load(*obj.LinkedDatasetID) +} + +type mergedPropertyFieldResolver struct{ *Resolver } + +func (r *mergedPropertyFieldResolver) Schema(ctx context.Context, obj *gqlmodel.MergedPropertyField) (*gqlmodel.PropertySchema, error) { + return dataloaders(ctx).PropertySchema.Load(obj.SchemaID) +} + +func (r *mergedPropertyFieldResolver) Field(ctx context.Context, obj 
*gqlmodel.MergedPropertyField) (*gqlmodel.PropertySchemaField, error) { + s, err := dataloaders(ctx).PropertySchema.Load(obj.SchemaID) + // Check the error before touching s: on failure the loader may return a nil schema. + if err != nil { + return nil, err + } + return s.Field(obj.FieldID), nil +} + +func (r *mergedPropertyFieldResolver) ActualValue(ctx context.Context, obj *gqlmodel.MergedPropertyField) (interface{}, error) { + datasetLoader := dataloaders(ctx).Dataset + return actualValue(datasetLoader, obj.Value, obj.Links, obj.Overridden) +} + +type propertyGroupListResolver struct{ *Resolver } + +func (*propertyGroupListResolver) Schema(ctx context.Context, obj *gqlmodel.PropertyGroupList) (*gqlmodel.PropertySchema, error) { + return dataloaders(ctx).PropertySchema.Load(obj.SchemaID) +} + +func (*propertyGroupListResolver) SchemaGroup(ctx context.Context, obj *gqlmodel.PropertyGroupList) (*gqlmodel.PropertySchemaGroup, error) { + s, err := dataloaders(ctx).PropertySchema.Load(obj.SchemaID) + if err != nil { + return nil, err + } + return s.Group(obj.SchemaGroupID), nil +} + +type propertyGroupResolver struct{ *Resolver } + +func (*propertyGroupResolver) Schema(ctx context.Context, obj *gqlmodel.PropertyGroup) (*gqlmodel.PropertySchema, error) { + return dataloaders(ctx).PropertySchema.Load(obj.SchemaID) +} + +func (*propertyGroupResolver) SchemaGroup(ctx context.Context, obj *gqlmodel.PropertyGroup) (*gqlmodel.PropertySchemaGroup, error) { + s, err := dataloaders(ctx).PropertySchema.Load(obj.SchemaID) + if err != nil { + return nil, err + } + return s.Group(obj.SchemaGroupID), nil +} + +func actualValue(datasetLoader DatasetDataLoader, value interface{}, links []*gqlmodel.PropertyFieldLink, overridden bool) (interface{}, error) { + if len(links) == 0 || overridden { + return &value, nil + } + // Only the head link has a dataset assigned, so follow the reference chain from the head link onward. + if len(links) > 1 && links[0].DatasetID != nil && links[len(links)-1].DatasetID == nil { + dsid := *links[0].DatasetID + for i, link := range links { + ds, err := 
datasetLoader.Load(dsid) + if err != nil { + return nil, err + } + field := ds.Field(link.DatasetSchemaFieldID) + if field != nil { + if i == len(links)-1 { + return &value, nil + } else if field.Type != gqlmodel.ValueTypeRef { + return nil, nil + } + if field.Value != nil { + val, ok := (field.Value).(string) + if ok { + dsid = gqlmodel.ID(val) + } else { + return nil, nil + } + } else { + return nil, nil + } + } + } + } else if lastLink := links[len(links)-1]; lastLink.DatasetID != nil { + // ไธ€็•ชๆœ€ๅพŒใฎใƒชใƒณใ‚ฏใ‚’ๅ–ๅพ— + ds, err := datasetLoader.Load(*lastLink.DatasetID) + if err != nil { + return nil, err + } + if f := ds.Field(lastLink.DatasetSchemaFieldID); f != nil { + return &f.Value, nil + } + } + return nil, nil +} diff --git a/server/internal/adapter/gql/resolver_property_schema.go b/server/internal/adapter/gql/resolver_property_schema.go new file mode 100644 index 000000000..5dea300b4 --- /dev/null +++ b/server/internal/adapter/gql/resolver_property_schema.go @@ -0,0 +1,88 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "golang.org/x/text/language" +) + +func (r *Resolver) PropertySchemaField() PropertySchemaFieldResolver { + return &propertySchemaFieldResolver{r} +} + +func (r *Resolver) PropertySchemaFieldChoice() PropertySchemaFieldChoiceResolver { + return &propertySchemaFieldChoiceResolver{r} +} + +func (r *Resolver) PropertyLinkableFields() PropertyLinkableFieldsResolver { + return &propertyLinkableFieldsResolver{r} +} + +func (r *Resolver) PropertySchemaGroup() PropertySchemaGroupResolver { + return &propertySchemaGroupResolver{r} +} + +type propertySchemaFieldResolver struct{ *Resolver } + +func (r *propertySchemaFieldResolver) TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *language.Tag) (string, error) { + if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Title, nil +} + +func (r 
*propertySchemaFieldResolver) TranslatedDescription(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *language.Tag) (string, error) { + if s, ok := obj.AllTranslatedDescription[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Description, nil +} + +type propertyLinkableFieldsResolver struct{ *Resolver } + +func (r *propertyLinkableFieldsResolver) Schema(ctx context.Context, obj *gqlmodel.PropertyLinkableFields) (*gqlmodel.PropertySchema, error) { + return dataloaders(ctx).PropertySchema.Load(obj.SchemaID) +} + +func (r *propertyLinkableFieldsResolver) LatlngField(ctx context.Context, obj *gqlmodel.PropertyLinkableFields) (*gqlmodel.PropertySchemaField, error) { + if obj.Latlng == nil { + return nil, nil + } + ps, err := dataloaders(ctx).PropertySchema.Load(obj.SchemaID) + return ps.Field(*obj.Latlng), err +} + +func (r *propertyLinkableFieldsResolver) URLField(ctx context.Context, obj *gqlmodel.PropertyLinkableFields) (*gqlmodel.PropertySchemaField, error) { + if obj.URL == nil { + return nil, nil + } + ps, err := dataloaders(ctx).PropertySchema.Load(obj.SchemaID) + return ps.Field(*obj.URL), err +} + +type propertySchemaGroupResolver struct{ *Resolver } + +func (r *propertySchemaGroupResolver) Schema(ctx context.Context, obj *gqlmodel.PropertySchemaGroup) (*gqlmodel.PropertySchema, error) { + return dataloaders(ctx).PropertySchema.Load(obj.SchemaID) +} + +func (r *propertySchemaGroupResolver) TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaGroup, lang *language.Tag) (string, error) { + if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { + return s, nil + } + + if obj.Title == nil { + return "", nil + } + return *obj.Title, nil +} + +type propertySchemaFieldChoiceResolver struct{ *Resolver } + +func (r *propertySchemaFieldChoiceResolver) TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaFieldChoice, lang *language.Tag) (string, error) { + if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { + 
return s, nil + } + return obj.Title, nil +} diff --git a/server/internal/adapter/gql/resolver_property_test.go b/server/internal/adapter/gql/resolver_property_test.go new file mode 100644 index 000000000..e6cd7ee40 --- /dev/null +++ b/server/internal/adapter/gql/resolver_property_test.go @@ -0,0 +1,52 @@ +package gql + +import ( + "testing" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/stretchr/testify/assert" +) + +func Test_actualValue(t *testing.T) { + value := 300 + + type args struct { + datasetLoader DatasetDataLoader + value interface{} + links []*gqlmodel.PropertyFieldLink + overridden bool + } + var tests = []struct { + name string + args args + want interface{} + wantErr bool + }{ + { + "Overridden value", + args{ + datasetLoader: nil, + value: value, + links: nil, + overridden: true, + }, + 300, + false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + got, err := actualValue(tt.args.datasetLoader, tt.args.value, tt.args.links, tt.args.overridden) + if (err != nil) != tt.wantErr { + t.Errorf("actualValue() error = %v, wantErr %v", err, tt.wantErr) + return + } + temp := got.(*interface{}) + t2 := (*temp).(int) + assert.Equal(t, tt.want, t2) + }) + } +} diff --git a/server/internal/adapter/gql/resolver_query.go b/server/internal/adapter/gql/resolver_query.go new file mode 100644 index 000000000..90aaa1611 --- /dev/null +++ b/server/internal/adapter/gql/resolver_query.go @@ -0,0 +1,261 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase" +) + +func (r *Resolver) Query() QueryResolver { + return &queryResolver{r} +} + +type queryResolver struct{ *Resolver } + +func (r *queryResolver) Assets(ctx context.Context, teamID gqlmodel.ID, keyword *string, sortType *gqlmodel.AssetSortType, pagination *gqlmodel.Pagination) (*gqlmodel.AssetConnection, error) { 
+ return loaders(ctx).Asset.FindByTeam(ctx, teamID, keyword, gqlmodel.AssetSortTypeFrom(sortType), pagination) +} + +func (r *queryResolver) Me(ctx context.Context) (*gqlmodel.Me, error) { + u := getUser(ctx) + if u == nil { + return nil, nil + } + return gqlmodel.ToMe(u), nil +} + +func (r *queryResolver) Node(ctx context.Context, i gqlmodel.ID, typeArg gqlmodel.NodeType) (gqlmodel.Node, error) { + dataloaders := dataloaders(ctx) + switch typeArg { + case gqlmodel.NodeTypeAsset: + result, err := dataloaders.Asset.Load(i) + if result == nil { + return nil, nil + } + return result, err + case gqlmodel.NodeTypeDataset: + result, err := dataloaders.Dataset.Load(i) + if result == nil { + return nil, nil + } + return result, err + case gqlmodel.NodeTypeDatasetSchema: + result, err := dataloaders.DatasetSchema.Load(i) + if result == nil { + return nil, nil + } + return result, err + case gqlmodel.NodeTypeLayerItem: + result, err := dataloaders.LayerItem.Load(i) + if result == nil { + return nil, nil + } + return result, err + case gqlmodel.NodeTypeLayerGroup: + result, err := dataloaders.LayerGroup.Load(i) + if result == nil { + return nil, nil + } + return result, err + case gqlmodel.NodeTypeProject: + result, err := dataloaders.Project.Load(i) + if result == nil { + return nil, nil + } + return result, err + case gqlmodel.NodeTypeProperty: + result, err := dataloaders.Property.Load(i) + if result == nil { + return nil, nil + } + return result, err + case gqlmodel.NodeTypeScene: + result, err := dataloaders.Scene.Load(i) + if result == nil { + return nil, nil + } + return result, err + case gqlmodel.NodeTypeTeam: + result, err := dataloaders.Team.Load(i) + if result == nil { + return nil, nil + } + return result, err + case gqlmodel.NodeTypeUser: + result, err := dataloaders.User.Load(i) + if result == nil { + return nil, nil + } + return result, err + } + return nil, nil +} + +func (r *queryResolver) Nodes(ctx context.Context, ids []gqlmodel.ID, typeArg 
gqlmodel.NodeType) ([]gqlmodel.Node, error) { + dataloaders := dataloaders(ctx) + switch typeArg { + case gqlmodel.NodeTypeAsset: + data, err := dataloaders.Asset.LoadAll(ids) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]gqlmodel.Node, len(data)) + for i := range data { + nodes[i] = data[i] + } + return nodes, nil + case gqlmodel.NodeTypeDataset: + data, err := dataloaders.Dataset.LoadAll(ids) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]gqlmodel.Node, len(data)) + for i := range data { + nodes[i] = data[i] + } + return nodes, nil + case gqlmodel.NodeTypeDatasetSchema: + data, err := dataloaders.DatasetSchema.LoadAll(ids) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]gqlmodel.Node, len(data)) + for i := range data { + nodes[i] = data[i] + } + return nodes, nil + case gqlmodel.NodeTypeLayerItem: + data, err := dataloaders.LayerItem.LoadAll(ids) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]gqlmodel.Node, len(data)) + for i := range data { + nodes[i] = *data[i] + } + return nodes, nil + case gqlmodel.NodeTypeLayerGroup: + data, err := dataloaders.LayerGroup.LoadAll(ids) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]gqlmodel.Node, len(data)) + for i := range data { + nodes[i] = *data[i] + } + return nodes, nil + case gqlmodel.NodeTypeProject: + data, err := dataloaders.Project.LoadAll(ids) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]gqlmodel.Node, len(data)) + for i := range data { + nodes[i] = data[i] + } + return nodes, nil + case gqlmodel.NodeTypeProperty: + data, err := dataloaders.Property.LoadAll(ids) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]gqlmodel.Node, len(data)) + for i := range data { + nodes[i] = data[i] + } + return nodes, nil + case gqlmodel.NodeTypeScene: + data, err := dataloaders.Scene.LoadAll(ids) + if 
len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]gqlmodel.Node, len(data)) + for i := range data { + nodes[i] = data[i] + } + return nodes, nil + case gqlmodel.NodeTypeTeam: + data, err := dataloaders.Team.LoadAll(ids) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]gqlmodel.Node, len(data)) + for i := range data { + nodes[i] = data[i] + } + return nodes, nil + case gqlmodel.NodeTypeUser: + data, err := dataloaders.User.LoadAll(ids) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]gqlmodel.Node, len(data)) + for i := range data { + nodes[i] = data[i] + } + return nodes, nil + default: + return nil, nil + } +} + +func (r *queryResolver) PropertySchema(ctx context.Context, i gqlmodel.ID) (*gqlmodel.PropertySchema, error) { + return dataloaders(ctx).PropertySchema.Load(i) +} + +func (r *queryResolver) PropertySchemas(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.PropertySchema, error) { + data, err := dataloaders(ctx).PropertySchema.LoadAll(ids) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + return data, nil +} + +func (r *queryResolver) Plugin(ctx context.Context, id gqlmodel.ID) (*gqlmodel.Plugin, error) { + return dataloaders(ctx).Plugin.Load(id) +} + +func (r *queryResolver) Plugins(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Plugin, error) { + data, err := dataloaders(ctx).Plugin.LoadAll(ids) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + return data, nil +} + +func (r *queryResolver) Layer(ctx context.Context, layerID gqlmodel.ID) (gqlmodel.Layer, error) { + result, err := dataloaders(ctx).Layer.Load(layerID) + if result == nil || *result == nil { + return nil, nil + } + return *result, err +} + +func (r *queryResolver) Scene(ctx context.Context, projectID gqlmodel.ID) (*gqlmodel.Scene, error) { + return loaders(ctx).Scene.FindByProject(ctx, projectID) +} + +func (r *queryResolver) Projects(ctx context.Context, teamID 
gqlmodel.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) { + return loaders(ctx).Project.FindByTeam(ctx, teamID, first, last, before, after) +} + +func (r *queryResolver) DatasetSchemas(ctx context.Context, sceneID gqlmodel.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) { + return loaders(ctx).Dataset.FindSchemaByScene(ctx, sceneID, first, last, before, after) +} + +func (r *queryResolver) DynamicDatasetSchemas(ctx context.Context, sceneID gqlmodel.ID) ([]*gqlmodel.DatasetSchema, error) { + return loaders(ctx).Dataset.FindDynamicSchemasByScene(ctx, sceneID) +} + +func (r *queryResolver) Datasets(ctx context.Context, datasetSchemaID gqlmodel.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetConnection, error) { + return loaders(ctx).Dataset.FindBySchema(ctx, datasetSchemaID, first, last, before, after) +} + +func (r *queryResolver) SearchUser(ctx context.Context, nameOrEmail string) (*gqlmodel.User, error) { + return loaders(ctx).User.SearchUser(ctx, nameOrEmail) +} + +func (r *queryResolver) CheckProjectAlias(ctx context.Context, alias string) (*gqlmodel.ProjectAliasAvailability, error) { + return loaders(ctx).Project.CheckAlias(ctx, alias) +} diff --git a/server/internal/adapter/gql/resolver_scene.go b/server/internal/adapter/gql/resolver_scene.go new file mode 100644 index 000000000..eff213ba7 --- /dev/null +++ b/server/internal/adapter/gql/resolver_scene.go @@ -0,0 +1,117 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *Resolver) Scene() SceneResolver { + return &sceneResolver{r} +} + +func (r *Resolver) ScenePlugin() ScenePluginResolver { + return &scenePluginResolver{r} +} + +func (r 
*Resolver) SceneWidget() SceneWidgetResolver { + return &sceneWidgetResolver{r} +} + +func (r *Resolver) Cluster() ClusterResolver { + return &clusterResolver{r} +} + +type sceneResolver struct{ *Resolver } + +func (r *sceneResolver) Project(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Project, error) { + return dataloaders(ctx).Project.Load(obj.ProjectID) +} + +func (r *sceneResolver) Team(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Team, error) { + return dataloaders(ctx).Team.Load(obj.TeamID) +} + +func (r *sceneResolver) Property(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Property, error) { + return dataloaders(ctx).Property.Load(obj.PropertyID) +} + +func (r *sceneResolver) RootLayer(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.LayerGroup, error) { + layer, err := dataloaders(ctx).Layer.Load(obj.RootLayerID) + if err != nil { + return nil, err + } + if layer == nil { + return nil, nil + } + layerGroup, ok := (*layer).(*gqlmodel.LayerGroup) + if !ok { + return nil, nil + } + return layerGroup, nil +} + +func (r *sceneResolver) DatasetSchemas(ctx context.Context, obj *gqlmodel.Scene, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) { + return loaders(ctx).Dataset.FindSchemaByScene(ctx, obj.ID, first, last, before, after) +} + +func (r *sceneResolver) Tags(ctx context.Context, obj *gqlmodel.Scene) ([]gqlmodel.Tag, error) { + sid, err := gqlmodel.ToID[id.Scene](obj.ID) + if err != nil { + return nil, err + } + + tags, err := usecases(ctx).Tag.FetchByScene(ctx, sid, getOperator(ctx)) + if err != nil { + return nil, err + } + + res := make([]gqlmodel.Tag, 0, len(tags)) + for _, t := range tags { + res = append(res, gqlmodel.ToTag(*t)) + } + return res, nil +} + +type scenePluginResolver struct{ *Resolver } + +func (r *scenePluginResolver) Plugin(ctx context.Context, obj *gqlmodel.ScenePlugin) (*gqlmodel.Plugin, error) { + return 
dataloaders(ctx).Plugin.Load(obj.PluginID) +} +func (r *scenePluginResolver) Property(ctx context.Context, obj *gqlmodel.ScenePlugin) (*gqlmodel.Property, error) { + if obj.PropertyID == nil { + return nil, nil + } + return dataloaders(ctx).Property.Load(*obj.PropertyID) +} + +type sceneWidgetResolver struct{ *Resolver } + +func (r *sceneWidgetResolver) Plugin(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.Plugin, error) { + return dataloaders(ctx).Plugin.Load(obj.PluginID) +} + +func (r *sceneWidgetResolver) Extension(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.PluginExtension, error) { + plugin, err := dataloaders(ctx).Plugin.Load(obj.PluginID) + if err != nil { + return nil, err + } + for _, e := range plugin.Extensions { + if e.ExtensionID == obj.ExtensionID { + return e, nil + } + } + return nil, nil +} + +func (r *sceneWidgetResolver) Property(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.Property, error) { + return dataloaders(ctx).Property.Load(obj.PropertyID) +} + +type clusterResolver struct{ *Resolver } + +func (r *clusterResolver) Property(ctx context.Context, obj *gqlmodel.Cluster) (*gqlmodel.Property, error) { + return dataloaders(ctx).Property.Load(obj.PropertyID) +} diff --git a/server/internal/adapter/gql/resolver_tag.go b/server/internal/adapter/gql/resolver_tag.go new file mode 100644 index 000000000..54bcfd580 --- /dev/null +++ b/server/internal/adapter/gql/resolver_tag.go @@ -0,0 +1,68 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +type tagItemResolver struct{ *Resolver } + +func (r *Resolver) TagItem() TagItemResolver { + return &tagItemResolver{r} +} + +func (t tagItemResolver) LinkedDatasetSchema(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.DatasetSchema, error) { + // Guard the pointer that is actually dereferenced below (LinkedDatasetSchemaID, not LinkedDatasetID). + if obj.LinkedDatasetSchemaID == nil { + return nil, nil + } + return dataloaders(ctx).DatasetSchema.Load(*obj.LinkedDatasetSchemaID) +} + +func (t tagItemResolver) 
LinkedDataset(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.Dataset, error) { + if obj.LinkedDatasetID == nil { + return nil, nil + } + return dataloaders(ctx).Dataset.Load(*obj.LinkedDatasetID) +} + +func (t tagItemResolver) LinkedDatasetField(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.DatasetField, error) { + if obj.LinkedDatasetID == nil { + return nil, nil + } + ds, err := dataloaders(ctx).Dataset.Load(*obj.LinkedDatasetID) + return ds.Field(*obj.LinkedDatasetFieldID), err +} + +func (t tagItemResolver) Parent(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.TagGroup, error) { + if obj.ParentID == nil { + return nil, nil + } + return dataloaders(ctx).TagGroup.Load(*obj.ParentID) +} + +func (tg tagItemResolver) Layers(ctx context.Context, obj *gqlmodel.TagItem) ([]gqlmodel.Layer, error) { + return loaders(ctx).Layer.FetchByTag(ctx, obj.ID) +} + +type tagGroupResolver struct{ *Resolver } + +func (r *Resolver) TagGroup() TagGroupResolver { + return &tagGroupResolver{r} +} + +func (r tagGroupResolver) Tags(ctx context.Context, obj *gqlmodel.TagGroup) ([]*gqlmodel.TagItem, error) { + tagItems, err := dataloaders(ctx).TagItem.LoadAll(obj.TagIds) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + return tagItems, nil +} + +func (r tagGroupResolver) Scene(ctx context.Context, obj *gqlmodel.TagGroup) (*gqlmodel.Scene, error) { + return dataloaders(ctx).Scene.Load(obj.SceneID) +} + +func (r tagGroupResolver) Layers(ctx context.Context, obj *gqlmodel.TagGroup) ([]gqlmodel.Layer, error) { + return loaders(ctx).Layer.FetchByTag(ctx, obj.ID) +} diff --git a/server/internal/adapter/gql/resolver_team.go b/server/internal/adapter/gql/resolver_team.go new file mode 100644 index 000000000..67455e63e --- /dev/null +++ b/server/internal/adapter/gql/resolver_team.go @@ -0,0 +1,37 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase" 
+) + +func (r *Resolver) Team() TeamResolver { + return &teamResolver{r} +} + +func (r *Resolver) TeamMember() TeamMemberResolver { + return &teamMemberResolver{r} +} + +type teamResolver struct{ *Resolver } + +func (r *teamResolver) Assets(ctx context.Context, obj *gqlmodel.Team, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.AssetConnection, error) { + return loaders(ctx).Asset.FindByTeam(ctx, obj.ID, nil, nil, &gqlmodel.Pagination{ + First: first, + Last: last, + After: after, + Before: before, + }) +} + +func (r *teamResolver) Projects(ctx context.Context, obj *gqlmodel.Team, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) { + return loaders(ctx).Project.FindByTeam(ctx, obj.ID, first, last, before, after) +} + +type teamMemberResolver struct{ *Resolver } + +func (r *teamMemberResolver) User(ctx context.Context, obj *gqlmodel.TeamMember) (*gqlmodel.User, error) { + return dataloaders(ctx).User.Load(obj.UserID) +} diff --git a/server/internal/adapter/gql/resolver_user.go b/server/internal/adapter/gql/resolver_user.go new file mode 100644 index 000000000..cb0c12ecf --- /dev/null +++ b/server/internal/adapter/gql/resolver_user.go @@ -0,0 +1,21 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" +) + +func (r *Resolver) Me() MeResolver { + return &meResolver{r} +} + +type meResolver struct{ *Resolver } + +func (r *meResolver) MyTeam(ctx context.Context, obj *gqlmodel.Me) (*gqlmodel.Team, error) { + return dataloaders(ctx).Team.Load(obj.MyTeamID) +} + +func (r *meResolver) Teams(ctx context.Context, obj *gqlmodel.Me) ([]*gqlmodel.Team, error) { + return loaders(ctx).Team.FindByUser(ctx, obj.ID) +} diff --git a/server/internal/adapter/http/published.go b/server/internal/adapter/http/published.go new file mode 100644 index 000000000..82e6f23d1 --- /dev/null +++ 
b/server/internal/adapter/http/published.go @@ -0,0 +1,29 @@ +package http + +import ( + "context" + "io" + "net/url" + + "github.com/reearth/reearth-backend/internal/usecase/interfaces" +) + +type PublishedController struct { + usecase interfaces.Published +} + +func NewPublishedController(usecase interfaces.Published) *PublishedController { + return &PublishedController{usecase: usecase} +} + +func (c *PublishedController) Metadata(ctx context.Context, name string) (interfaces.ProjectPublishedMetadata, error) { + return c.usecase.Metadata(ctx, name) +} + +func (c *PublishedController) Data(ctx context.Context, name string) (io.Reader, error) { + return c.usecase.Data(ctx, name) +} + +func (c *PublishedController) Index(ctx context.Context, name string, url *url.URL) (string, error) { + return c.usecase.Index(ctx, name, url) +} diff --git a/server/internal/adapter/http/user.go b/server/internal/adapter/http/user.go new file mode 100644 index 000000000..e235189b5 --- /dev/null +++ b/server/internal/adapter/http/user.go @@ -0,0 +1,148 @@ +package http + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/adapter" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" + "golang.org/x/text/language" +) + +type UserController struct { + usecase interfaces.User +} + +func NewUserController(usecase interfaces.User) *UserController { + return &UserController{ + usecase: usecase, + } +} + +type PasswordResetInput struct { + Email string `json:"email"` + Token string `json:"token"` + Password string `json:"password"` +} + +type SignupInput struct { + Sub *string `json:"sub"` + Secret *string `json:"secret"` + UserID *id.UserID `json:"userId"` + TeamID *id.TeamID `json:"teamId"` + Name *string `json:"name"` + // Username is an alias of Name + Username *string `json:"username"` + Email *string `json:"email"` + Password *string 
`json:"password"` + Theme *user.Theme `json:"theme"` + Lang *language.Tag `json:"lang"` +} + +type CreateVerificationInput struct { + Email string `json:"email"` +} + +type VerifyUserOutput struct { + UserID string `json:"userId"` + Verified bool `json:"verified"` +} + +type CreateUserInput struct { + Sub string `json:"sub"` + Secret string `json:"secret"` + UserID *id.UserID `json:"userId"` + TeamID *id.TeamID `json:"teamId"` +} + +type SignupOutput struct { + ID string `json:"id"` + Name string `json:"name"` + Email string `json:"email"` +} + +func (c *UserController) Signup(ctx context.Context, input SignupInput) (SignupOutput, error) { + var u *user.User + var err error + + name := input.Name + if name == nil { + name = input.Username + } + if name == nil { + name = input.Email + } + + if au := adapter.GetAuthInfo(ctx); au != nil { + var name2 string + if name != nil { + name2 = *name + } + + u, _, err = c.usecase.SignupOIDC(ctx, interfaces.SignupOIDCParam{ + Sub: au.Sub, + AccessToken: au.Token, + Issuer: au.Iss, + Email: au.Email, + Name: name2, + Secret: input.Secret, + User: interfaces.SignupUserParam{ + UserID: input.UserID, + TeamID: input.TeamID, + Lang: input.Lang, + Theme: input.Theme, + }, + }) + } else if name != nil && input.Email != nil { + u, _, err = c.usecase.Signup(ctx, interfaces.SignupParam{ + Sub: input.Sub, + Name: *name, + Email: *input.Email, + Password: input.Password, + Secret: input.Secret, + User: interfaces.SignupUserParam{ + UserID: input.UserID, + TeamID: input.TeamID, + Lang: input.Lang, + Theme: input.Theme, + }, + }) + } else { + err = errors.New("invalid params") + } + + if err != nil { + return SignupOutput{}, err + } + + return SignupOutput{ + ID: u.ID().String(), + Name: u.Name(), + Email: u.Email(), + }, nil +} + +func (c *UserController) CreateVerification(ctx context.Context, input CreateVerificationInput) error { + return c.usecase.CreateVerification(ctx, input.Email) +} + +func (c *UserController) VerifyUser(ctx 
context.Context, code string) (VerifyUserOutput, error) { + u, err := c.usecase.VerifyUser(ctx, code) + if err != nil { + return VerifyUserOutput{}, err + } + return VerifyUserOutput{ + UserID: u.ID().String(), + Verified: u.Verification().IsVerified(), + }, nil +} + +func (c *UserController) StartPasswordReset(ctx context.Context, input PasswordResetInput) error { + return c.usecase.StartPasswordReset(ctx, input.Email) +} + +func (c *UserController) PasswordReset(ctx context.Context, input PasswordResetInput) error { + return c.usecase.PasswordReset(ctx, input.Password, input.Token) +} diff --git a/server/internal/app/app.go b/server/internal/app/app.go new file mode 100644 index 000000000..95d423316 --- /dev/null +++ b/server/internal/app/app.go @@ -0,0 +1,174 @@ +package app + +import ( + "context" + "errors" + "io/fs" + "net/http" + "net/http/pprof" + "os" + + "github.com/99designs/gqlgen/graphql/playground" + "github.com/labstack/echo/v4" + "github.com/labstack/echo/v4/middleware" + "github.com/reearth/reearth-backend/internal/usecase/interactor" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" + "go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho" +) + +func initEcho(ctx context.Context, cfg *ServerConfig) *echo.Echo { + if cfg.Config == nil { + log.Fatalln("ServerConfig.Config is nil") + } + + e := echo.New() + e.Debug = cfg.Debug + e.HideBanner = true + e.HidePort = true + e.HTTPErrorHandler = errorHandler(e.DefaultHTTPErrorHandler) + + // basic middleware + logger := GetEchoLogger() + e.Logger = logger + e.Use( + logger.Hook(), + middleware.Recover(), + otelecho.Middleware("reearth-backend"), + ) + origins := allowedOrigins(cfg) + if len(origins) > 0 { + e.Use( + middleware.CORSWithConfig(middleware.CORSConfig{ + AllowOrigins: origins, + }), + ) + } + + e.Use( + jwtEchoMiddleware(cfg), + parseJwtMiddleware(), + authMiddleware(cfg), + ) + + // enable pprof + if e.Debug { + pprofGroup 
:= e.Group("/debug/pprof") + pprofGroup.Any("/cmdline", echo.WrapHandler(http.HandlerFunc(pprof.Cmdline))) + pprofGroup.Any("/profile", echo.WrapHandler(http.HandlerFunc(pprof.Profile))) + pprofGroup.Any("/symbol", echo.WrapHandler(http.HandlerFunc(pprof.Symbol))) + pprofGroup.Any("/trace", echo.WrapHandler(http.HandlerFunc(pprof.Trace))) + pprofGroup.Any("/*", echo.WrapHandler(http.HandlerFunc(pprof.Index))) + } + + // GraphQL Playground without auth + gqldev := cfg.Debug || cfg.Config.Dev + if gqldev { + e.GET("/graphql", echo.WrapHandler( + playground.Handler("reearth-backend", "/api/graphql"), + )) + log.Infof("gql: GraphQL Playground is available") + } + + // init usecases + var publishedIndexHTML string + if cfg.Config.Published.IndexURL == nil || cfg.Config.Published.IndexURL.String() == "" { + if html, err := fs.ReadFile(os.DirFS("."), "web/published.html"); err == nil { + publishedIndexHTML = string(html) + } + } + + e.Use(UsecaseMiddleware(cfg.Repos, cfg.Gateways, interactor.ContainerConfig{ + SignupSecret: cfg.Config.SignupSecret, + PublishedIndexHTML: publishedIndexHTML, + PublishedIndexURL: cfg.Config.Published.IndexURL, + AuthSrvUIDomain: cfg.Config.Host_Web, + })) + + // auth srv + if !cfg.Config.AuthSrv.Disabled { + auth := e.Group("") + authEndPoints(ctx, e, auth, cfg) + } + + // apis + api := e.Group("/api") + api.GET("/ping", Ping()) + api.POST("/graphql", GraphqlAPI(cfg.Config.GraphQL, gqldev)) + api.GET("/published/:name", PublishedMetadata()) + api.GET("/published_data/:name", PublishedData()) + api.GET("/layers/:param", ExportLayer(), AuthRequiredMiddleware()) + api.POST("/signup", Signup()) + + if !cfg.Config.AuthSrv.Disabled { + api.POST("/signup/verify", StartSignupVerify()) + api.POST("/signup/verify/:code", SignupVerify()) + api.POST("/password-reset", PasswordReset()) + } + + published := e.Group("/p", PublishedAuthMiddleware()) + published.GET("/:name/data.json", PublishedData()) + published.GET("/:name/", PublishedIndex()) + + 
serveFiles(e, cfg.Gateways.File) + web(e, cfg.Config.Web, cfg.Config.Auths()) + + return e +} + +func errorHandler(next func(error, echo.Context)) func(error, echo.Context) { + return func(err error, c echo.Context) { + if c.Response().Committed { + return + } + + code, msg := errorMessage(err, func(f string, args ...interface{}) { + c.Echo().Logger.Errorf(f, args...) + }) + if err := c.JSON(code, map[string]string{ + "error": msg, + }); err != nil { + next(err, c) + } + } +} + +func allowedOrigins(cfg *ServerConfig) []string { + if cfg == nil { + return nil + } + origins := append([]string{}, cfg.Config.Origins...) + if cfg.Debug { + origins = append(origins, "http://localhost:3000", "http://127.0.0.1:3000", "http://localhost:8080") + } + return origins +} + +func errorMessage(err error, log func(string, ...interface{})) (int, string) { + code := http.StatusBadRequest + msg := err.Error() + + if err2, ok := err.(*echo.HTTPError); ok { + code = err2.Code + if msg2, ok := err2.Message.(string); ok { + msg = msg2 + } else if msg2, ok := err2.Message.(error); ok { + msg = msg2.Error() + } else { + msg = "error" + } + if err2.Internal != nil { + log("echo internal err: %+v", err2) + } + } else if errors.Is(err, rerror.ErrNotFound) { + code = http.StatusNotFound + msg = "not found" + } else { + if ierr := rerror.UnwrapErrInternal(err); ierr != nil { + code = http.StatusInternalServerError + msg = "internal server error" + } + } + + return code, msg +} diff --git a/server/internal/app/auth_client.go b/server/internal/app/auth_client.go new file mode 100644 index 000000000..ee32430d0 --- /dev/null +++ b/server/internal/app/auth_client.go @@ -0,0 +1,134 @@ +package app + +import ( + "context" + + "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/internal/adapter" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" + 
"github.com/reearth/reearth-backend/pkg/user" +) + +func authMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + req := c.Request() + ctx := req.Context() + + var userID string + var u *user.User + + // get sub from context + au := adapter.GetAuthInfo(ctx) + if u, ok := ctx.Value(contextUser).(string); ok { + userID = u + } + + // debug mode + if cfg.Debug { + if userID := c.Request().Header.Get(debugUserHeader); userID != "" { + if id, err := id.UserIDFrom(userID); err == nil { + user2, err := cfg.Repos.User.FindByID(ctx, id) + if err == nil && user2 != nil { + u = user2 + } + } + } + } + + if u == nil && userID != "" { + if userID2, err := id.UserIDFrom(userID); err == nil { + u, err = cfg.Repos.User.FindByID(ctx, userID2) + if err != nil && err != rerror.ErrNotFound { + return err + } + } else { + return err + } + } + + if u == nil && au != nil { + var err error + // find user + u, err = cfg.Repos.User.FindByAuth0Sub(ctx, au.Sub) + if err != nil && err != rerror.ErrNotFound { + return err + } + } + + // save a new sub + if u != nil && au != nil { + if err := addAuth0SubToUser(ctx, u, user.AuthFromAuth0Sub(au.Sub), cfg); err != nil { + return err + } + } + + if u != nil { + op, err := generateOperator(ctx, cfg, u) + if err != nil { + return err + } + + ctx = adapter.AttachUser(ctx, u) + ctx = adapter.AttachOperator(ctx, op) + } + + c.SetRequest(req.WithContext(ctx)) + return next(c) + } + } +} + +func generateOperator(ctx context.Context, cfg *ServerConfig, u *user.User) (*usecase.Operator, error) { + if u == nil { + return nil, nil + } + + uid := u.ID() + teams, err := cfg.Repos.Team.FindByUser(ctx, uid) + if err != nil { + return nil, err + } + scenes, err := cfg.Repos.Scene.FindByTeam(ctx, teams.IDs()...) 
+ if err != nil { + return nil, err + } + + readableTeams := teams.FilterByUserRole(uid, user.RoleReader).IDs() + writableTeams := teams.FilterByUserRole(uid, user.RoleWriter).IDs() + owningTeams := teams.FilterByUserRole(uid, user.RoleOwner).IDs() + + return &usecase.Operator{ + User: uid, + ReadableTeams: readableTeams, + WritableTeams: writableTeams, + OwningTeams: owningTeams, + ReadableScenes: scenes.FilterByTeam(readableTeams...).IDs(), + WritableScenes: scenes.FilterByTeam(writableTeams...).IDs(), + OwningScenes: scenes.FilterByTeam(owningTeams...).IDs(), + }, nil +} + +func addAuth0SubToUser(ctx context.Context, u *user.User, a user.Auth, cfg *ServerConfig) error { + if u.AddAuth(a) { + err := cfg.Repos.User.Save(ctx, u) + if err != nil { + return err + } + } + return nil +} + +func AuthRequiredMiddleware() echo.MiddlewareFunc { + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + ctx := c.Request().Context() + if adapter.Operator(ctx) == nil { + return echo.ErrUnauthorized + } + return next(c) + } + } +} diff --git a/server/internal/app/auth_server.go b/server/internal/app/auth_server.go new file mode 100644 index 000000000..9430fa3cd --- /dev/null +++ b/server/internal/app/auth_server.go @@ -0,0 +1,287 @@ +package app + +import ( + "context" + "crypto/sha256" + "encoding/json" + "errors" + "net/http" + "net/url" + "os" + "strconv" + + "github.com/caos/oidc/pkg/op" + "github.com/golang/gddo/httputil/header" + "github.com/gorilla/mux" + "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/internal/usecase/interactor" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/user" +) + +const ( + loginEndpoint = "api/login" + logoutEndpoint = "api/logout" + jwksEndpoint = ".well-known/jwks.json" + authProvider = "reearth" +) + +func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg 
*ServerConfig) { + userUsecase := interactor.NewUser(cfg.Repos, cfg.Gateways, cfg.Config.SignupSecret, cfg.Config.Host_Web) + + domain := cfg.Config.AuthServeDomainURL() + if domain == nil || domain.String() == "" { + log.Panicf("auth: not valid auth domain: %s", domain) + } + domain.Path = "/" + + uidomain := cfg.Config.AuthServeUIDomainURL() + + config := &op.Config{ + Issuer: domain.String(), + CryptoKey: sha256.Sum256([]byte(cfg.Config.AuthSrv.Key)), + GrantTypeRefreshToken: true, + } + + var dn *interactor.AuthDNConfig = nil + if cfg.Config.AuthSrv.DN != nil { + dn = &interactor.AuthDNConfig{ + CommonName: cfg.Config.AuthSrv.DN.CN, + Organization: cfg.Config.AuthSrv.DN.O, + OrganizationalUnit: cfg.Config.AuthSrv.DN.OU, + Country: cfg.Config.AuthSrv.DN.C, + Locality: cfg.Config.AuthSrv.DN.L, + Province: cfg.Config.AuthSrv.DN.ST, + StreetAddress: cfg.Config.AuthSrv.DN.Street, + PostalCode: cfg.Config.AuthSrv.DN.PostalCode, + } + } + + storage, err := interactor.NewAuthStorage( + ctx, + &interactor.StorageConfig{ + Domain: domain.String(), + ClientDomain: cfg.Config.Host_Web, + Debug: cfg.Debug, + DN: dn, + }, + cfg.Repos.AuthRequest, + cfg.Repos.Config, + userUsecase.GetUserBySubject, + ) + if err != nil { + log.Fatalf("auth: init failed: %s\n", err) + } + + handler, err := op.NewOpenIDProvider( + ctx, + config, + storage, + op.WithHttpInterceptors(jsonToFormHandler()), + op.WithHttpInterceptors(setURLVarsHandler()), + op.WithCustomEndSessionEndpoint(op.NewEndpoint(logoutEndpoint)), + op.WithCustomKeysEndpoint(op.NewEndpoint(jwksEndpoint)), + ) + if err != nil { + log.Fatalf("auth: init failed: %s\n", err) + } + + router := handler.HttpHandler().(*mux.Router) + + if err := router.Walk(muxToEchoMapper(r)); err != nil { + log.Fatalf("auth: walk failed: %s\n", err) + } + + // Actual login endpoint + r.POST(loginEndpoint, login(ctx, domain, uidomain, storage, userUsecase)) + + r.GET(logoutEndpoint, logout()) + + // used for auth0/auth0-react; the logout endpoint URL 
is hard-coded + // can be removed when the mentioned issue is solved + // https://github.com/auth0/auth0-spa-js/issues/845 + r.GET("v2/logout", logout()) + + debugMsg := "" + if dev, ok := os.LookupEnv(op.OidcDevMode); ok { + if isDev, _ := strconv.ParseBool(dev); isDev { + debugMsg = " with debug mode" + } + } + log.Infof("auth: oidc server started%s at %s", debugMsg, domain.String()) +} + +func setURLVarsHandler() func(handler http.Handler) http.Handler { + return func(handler http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/authorize/callback" { + handler.ServeHTTP(w, r) + return + } + + r2 := mux.SetURLVars(r, map[string]string{"id": r.URL.Query().Get("id")}) + handler.ServeHTTP(w, r2) + }) + } +} + +func jsonToFormHandler() func(handler http.Handler) http.Handler { + return func(handler http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/oauth/token" { + handler.ServeHTTP(w, r) + return + } + + if r.Header.Get("Content-Type") != "" { + value, _ := header.ParseValueAndParams(r.Header, "Content-Type") + if value != "application/json" { + // Content-Type header is not application/json + handler.ServeHTTP(w, r) + return + } + } + + if err := r.ParseForm(); err != nil { + return + } + + var result map[string]string + + if err := json.NewDecoder(r.Body).Decode(&result); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + for key, value := range result { + r.Form.Set(key, value) + } + + handler.ServeHTTP(w, r) + }) + } +} + +func muxToEchoMapper(r *echo.Group) func(route *mux.Route, router *mux.Router, ancestors []*mux.Route) error { + return func(route *mux.Route, router *mux.Router, ancestors []*mux.Route) error { + path, err := route.GetPathTemplate() + if err != nil { + return err + } + + methods, err := route.GetMethods() + if err != nil { + r.Any(path, echo.WrapHandler(route.GetHandler())) 
+ return nil + } + + for _, method := range methods { + r.Add(method, path, echo.WrapHandler(route.GetHandler())) + } + + return nil + } +} + +type loginForm struct { + Email string `json:"username" form:"username"` + Password string `json:"password" form:"password"` + AuthRequestID string `json:"id" form:"id"` +} + +func login(ctx context.Context, url, uiurl *url.URL, storage op.Storage, userUsecase interfaces.User) func(ctx echo.Context) error { + return func(ec echo.Context) error { + request := new(loginForm) + err := ec.Bind(request) + if err != nil { + log.Errorln("auth: failed to parse login request") + return ec.Redirect( + http.StatusFound, + redirectURL(uiurl, "/login", "", "Bad request!"), + ) + } + + if _, err := storage.AuthRequestByID(ctx, request.AuthRequestID); err != nil { + log.Errorf("auth: failed to parse login request: %s\n", err) + return ec.Redirect( + http.StatusFound, + redirectURL(uiurl, "/login", "", "Bad request!"), + ) + } + + if len(request.Email) == 0 || len(request.Password) == 0 { + log.Errorln("auth: one of the credentials is not provided") + return ec.Redirect( + http.StatusFound, + redirectURL(uiurl, "/login", request.AuthRequestID, "Bad request!"), + ) + } + + // check user credentials from db + u, err := userUsecase.GetUserByCredentials(ctx, interfaces.GetUserByCredentials{ + Email: request.Email, + Password: request.Password, + }) + var auth *user.Auth + if err == nil { + auth = u.GetAuthByProvider(authProvider) + if auth == nil { + err = errors.New("The account is not signed up with Re:Earth") + } + } + if err != nil { + log.Errorf("auth: wrong credentials: %s\n", err) + return ec.Redirect( + http.StatusFound, + redirectURL(uiurl, "/login", request.AuthRequestID, "Login failed; Invalid user ID or password."), + ) + } + + // Complete the auth request && set the subject + err = storage.(*interactor.AuthStorage).CompleteAuthRequest(ctx, request.AuthRequestID, auth.Sub) + if err != nil { + log.Errorf("auth: failed to complete the auth 
request: %s\n", err) + return ec.Redirect( + http.StatusFound, + redirectURL(uiurl, "/login", request.AuthRequestID, "Bad request!"), + ) + } + + return ec.Redirect( + http.StatusFound, + redirectURL(url, "/authorize/callback", request.AuthRequestID, ""), + ) + } +} + +func logout() func(ec echo.Context) error { + return func(ec echo.Context) error { + u := ec.QueryParam("returnTo") + return ec.Redirect(http.StatusTemporaryRedirect, u) + } +} + +func redirectURL(u *url.URL, p string, requestID, err string) string { + v := cloneURL(u) + if p != "" { + v.Path = p + } + queryValues := u.Query() + queryValues.Set("id", requestID) + if err != "" { + queryValues.Set("error", err) + } + v.RawQuery = queryValues.Encode() + return v.String() +} + +func cloneURL(u *url.URL) *url.URL { + return &url.URL{ + Scheme: u.Scheme, + Opaque: u.Opaque, + User: u.User, + Host: u.Host, + Path: u.Path, + } +} diff --git a/server/internal/app/config.go b/server/internal/app/config.go new file mode 100644 index 000000000..15e5c61a5 --- /dev/null +++ b/server/internal/app/config.go @@ -0,0 +1,337 @@ +package app + +import ( + "encoding/json" + "fmt" + "net/url" + "os" + "strings" + + "github.com/caos/oidc/pkg/op" + "github.com/joho/godotenv" + "github.com/kelseyhightower/envconfig" + "github.com/reearth/reearth-backend/pkg/auth" + "github.com/reearth/reearth-backend/pkg/log" + "golang.org/x/oauth2" + "golang.org/x/oauth2/clientcredentials" +) + +const configPrefix = "reearth" + +type Config struct { + Port string `default:"8080" envconfig:"PORT"` + ServerHost string + Host string `default:"http://localhost:8080"` + Host_Web string + Dev bool + DB string `default:"mongodb://localhost"` + Mailer string + SMTP SMTPConfig + SendGrid SendGridConfig + GraphQL GraphQLConfig + Published PublishedConfig + GCPProject string `envconfig:"GOOGLE_CLOUD_PROJECT"` + Profiler string + Tracer string + TracerSample float64 + GCS GCSConfig + Marketplace MarketplaceConfig + AssetBaseURL string 
`default:"http://localhost:8080/assets"` + Origins []string + Web WebConfig + SignupSecret string + SignupDisabled bool + // auth + Auth AuthConfigs + Auth0 Auth0Config + AuthSrv AuthSrvConfig + Auth_ISS string + Auth_AUD string + Auth_ALG *string + Auth_TTL *int + Auth_ClientID *string +} + +type Auth0Config struct { + Domain string + Audience string + ClientID string + ClientSecret string + WebClientID string +} + +type AuthSrvConfig struct { + Dev bool + Disabled bool + Domain string + UIDomain string + Key string + DN *AuthSrvDNConfig +} + +func (c AuthSrvConfig) AuthConfig(debug bool, host string) *AuthConfig { + if c.Disabled { + return nil + } + + domain := c.Domain + if domain == "" { + domain = host + } + + var aud []string + if debug && host != "" && c.Domain != "" && c.Domain != host { + aud = []string{host, c.Domain} + } else { + aud = []string{domain} + } + + clientID := auth.ClientID + + return &AuthConfig{ + ISS: domain, + AUD: aud, + ClientID: &clientID, + } +} + +type AuthSrvDNConfig struct { + CN string + O []string + OU []string + C []string + L []string + ST []string + Street []string + PostalCode []string +} + +type GraphQLConfig struct { + ComplexityLimit int `default:"6000"` +} + +type PublishedConfig struct { + IndexURL *url.URL +} + +type GCSConfig struct { + BucketName string + PublicationCacheControl string +} + +type SendGridConfig struct { + Email string + Name string + API string +} + +type SMTPConfig struct { + Host string + Port string + SMTPUsername string + Email string + Password string +} + +func ReadConfig(debug bool) (*Config, error) { + // load .env + if err := godotenv.Load(".env"); err != nil && !os.IsNotExist(err) { + return nil, err + } else if err == nil { + log.Infof("config: .env loaded") + } + + var c Config + err := envconfig.Process(configPrefix, &c) + + // overwrite env vars + if !c.AuthSrv.Disabled && (c.Dev || c.AuthSrv.Dev || c.AuthSrv.Domain == "") { + if _, ok := os.LookupEnv(op.OidcDevMode); !ok { + _ = 
os.Setenv(op.OidcDevMode, "1") + } + } + + // default values + if debug { + c.Dev = true + } + c.Host = addHTTPScheme(c.Host) + if c.Host_Web == "" { + c.Host_Web = c.Host + } else { + c.Host_Web = addHTTPScheme(c.Host_Web) + } + if c.AuthSrv.Domain == "" { + c.AuthSrv.Domain = c.Host + } else { + c.AuthSrv.Domain = addHTTPScheme(c.AuthSrv.Domain) + } + if c.Host_Web == "" { + c.Host_Web = c.Host + } + if c.AuthSrv.UIDomain == "" { + c.AuthSrv.UIDomain = c.Host_Web + } else { + c.AuthSrv.UIDomain = addHTTPScheme(c.AuthSrv.UIDomain) + } + + return &c, err +} + +func (c Config) Print() string { + s := fmt.Sprintf("%+v", c) + for _, secret := range []string{c.DB, c.Auth0.ClientSecret} { + if secret == "" { + continue + } + s = strings.ReplaceAll(s, secret, "***") + } + return s +} + +func (c Config) Auths() (res []AuthConfig) { + if ac := c.Auth0.AuthConfig(); ac != nil { + res = append(res, *ac) + } + if c.Auth_ISS != "" { + var aud []string + if len(c.Auth_AUD) > 0 { + aud = append(aud, c.Auth_AUD) + } + res = append(res, AuthConfig{ + ISS: c.Auth_ISS, + AUD: aud, + ALG: c.Auth_ALG, + TTL: c.Auth_TTL, + ClientID: c.Auth_ClientID, + }) + } + if ac := c.AuthSrv.AuthConfig(c.Dev, c.Host); ac != nil { + res = append(res, *ac) + } + return append(res, c.Auth...) 
+} + +func prepareUrl(url string) string { + if !strings.HasPrefix(url, "https://") && !strings.HasPrefix(url, "http://") { + url = "https://" + url + } + url = strings.TrimSuffix(url, "/") + return url +} + +func (c Auth0Config) AuthConfig() *AuthConfig { + if c.Domain == "" { + return nil + } + domain := prepareUrl(c.Domain) + var aud []string + if len(c.Audience) > 0 { + aud = []string{c.Audience} + } + return &AuthConfig{ + ISS: domain, + AUD: aud, + ClientID: &c.ClientID, + } +} + +type AuthConfig struct { + ISS string + AUD []string + ALG *string + TTL *int + ClientID *string +} + +type AuthConfigs []AuthConfig + +// Decode is a custom decoder for AuthConfigs +func (ipd *AuthConfigs) Decode(value string) error { + if value == "" { + return nil + } + + var providers []AuthConfig + + err := json.Unmarshal([]byte(value), &providers) + if err != nil { + return fmt.Errorf("invalid identity providers json: %w", err) + } + + *ipd = providers + return nil +} + +func (c Config) HostURL() *url.URL { + u, err := url.Parse(c.Host) + if err != nil { + u = nil + } + return u +} + +func (c Config) HostWebURL() *url.URL { + u, err := url.Parse(c.Host_Web) + if err != nil { + u = nil + } + return u +} + +func (c Config) AuthServeDomainURL() *url.URL { + u, err := url.Parse(c.AuthSrv.Domain) + if err != nil { + u = nil + } + return u +} + +func (c Config) AuthServeUIDomainURL() *url.URL { + u, err := url.Parse(c.AuthSrv.UIDomain) + if err != nil { + u = nil + } + return u +} + +func addHTTPScheme(host string) string { + if host == "" { + return "" + } + if !strings.HasPrefix(host, "https://") && !strings.HasPrefix(host, "http://") { + host = "http://" + host + } + return host +} + +type MarketplaceConfig struct { + Endpoint string + OAuth OAuthClientCredentialsConfig +} + +type OAuthClientCredentialsConfig struct { + ClientID string + ClientSecret string + TokenURL string + Scopes []string + Audience []string +} + +func (c OAuthClientCredentialsConfig) Config() 
clientcredentials.Config { + var params url.Values + if len(c.Audience) > 0 { + params = url.Values{ + "audience": c.Audience, + } + } + + return clientcredentials.Config{ + ClientID: c.ClientID, + ClientSecret: c.ClientSecret, + TokenURL: c.TokenURL, + Scopes: c.Scopes, + AuthStyle: oauth2.AuthStyleInParams, + EndpointParams: params, + } +} diff --git a/server/internal/app/config_test.go b/server/internal/app/config_test.go new file mode 100644 index 000000000..d19faab23 --- /dev/null +++ b/server/internal/app/config_test.go @@ -0,0 +1,70 @@ +package app + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/auth" + "github.com/stretchr/testify/assert" +) + +func TestAuth0Config_AuthConfig(t *testing.T) { + s := "" + assert.Equal(t, &AuthConfig{ + ISS: "https://hoge.auth0.com", + AUD: []string{"xxx"}, + ClientID: &s, + }, Auth0Config{ + Domain: "hoge.auth0.com/", + Audience: "xxx", + }.AuthConfig()) + assert.Nil(t, Auth0Config{ + Domain: "", + Audience: "xxx", + }.AuthConfig()) +} + +func TestReadConfig(t *testing.T) { + clientID := auth.ClientID + localAuth := AuthConfig{ + ISS: "http://localhost:8080", + AUD: []string{"http://localhost:8080"}, + ClientID: &clientID, + } + + cfg, err := ReadConfig(false) + assert.NoError(t, err) + assert.Nil(t, cfg.Auth) + assert.Equal(t, []AuthConfig{localAuth}, cfg.Auths()) + + t.Setenv("REEARTH_AUTH", `[{"iss":"bar"}]`) + t.Setenv("REEARTH_AUTH_ISS", "hoge") + cfg, err = ReadConfig(false) + assert.NoError(t, err) + assert.Equal(t, AuthConfigs([]AuthConfig{{ISS: "bar"}}), cfg.Auth) + assert.Equal(t, []AuthConfig{ + {ISS: "hoge"}, // REEARTH_AUTH_* + localAuth, // local auth srv + {ISS: "bar"}, // REEARTH_AUTH + }, cfg.Auths()) + assert.Equal(t, "hoge", cfg.Auth_ISS) + assert.Equal(t, "", cfg.Auth_AUD) + + t.Setenv("REEARTH_AUTH_AUD", "foo") + t.Setenv("REEARTH_AUTH0_DOMAIN", "foo") + t.Setenv("REEARTH_AUTH0_CLIENTID", clientID) + cfg, err = ReadConfig(false) + assert.NoError(t, err) + assert.Equal(t, []AuthConfig{ 
+ {ISS: "https://foo", ClientID: &clientID}, // Auth0 + {ISS: "hoge", AUD: []string{"foo"}}, // REEARTH_AUTH_* + localAuth, // local auth srv + {ISS: "bar"}, // REEARTH_AUTH + }, cfg.Auths()) + assert.Equal(t, "foo", cfg.Auth_AUD) +} + +func Test_AddHTTPScheme(t *testing.T) { + assert.Equal(t, "http://a", addHTTPScheme("a")) + assert.Equal(t, "http://a", addHTTPScheme("http://a")) + assert.Equal(t, "https://a", addHTTPScheme("https://a")) +} diff --git a/server/internal/app/echo-logrus.go b/server/internal/app/echo-logrus.go new file mode 100644 index 000000000..340fd07ce --- /dev/null +++ b/server/internal/app/echo-logrus.go @@ -0,0 +1,217 @@ +package app + +// https://github.com/plutov/echo-logrus with some modifications +// MIT License +// Copyright (c) 2017 Alex Pliutau + +import ( + "io" + "time" + + "github.com/labstack/echo/v4" + "github.com/labstack/gommon/log" + "github.com/sirupsen/logrus" +) + +// Logrus : implement Logger +type Logger struct{} + +var _ echo.Logger = new(Logger) + +// GetEchoLogger for e.Logger +func GetEchoLogger() *Logger { + return &Logger{} +} + +// Level returns logger level +func (l *Logger) Level() log.Lvl { + switch logrus.StandardLogger().Level { + case logrus.DebugLevel: + return log.DEBUG + case logrus.WarnLevel: + return log.WARN + case logrus.ErrorLevel: + return log.ERROR + case logrus.InfoLevel: + return log.INFO + default: + l.Panic("Invalid level") + } + return log.OFF +} + +// SetHeader is a stub to satisfy interface +// It's controlled by Logger +func (l *Logger) SetHeader(_ string) {} + +// SetPrefix It's controlled by Logger +func (l *Logger) SetPrefix(s string) {} + +// Prefix It's controlled by Logger +func (l *Logger) Prefix() string { + return "" +} + +// SetLevel set level to logger from given log.Lvl +func (l *Logger) SetLevel(lvl log.Lvl) { + switch lvl { + case log.DEBUG: + logrus.SetLevel(logrus.DebugLevel) + case log.WARN: + logrus.SetLevel(logrus.WarnLevel) + case log.ERROR: + 
logrus.SetLevel(logrus.ErrorLevel) + case log.INFO: + logrus.SetLevel(logrus.InfoLevel) + default: + l.Panic("Invalid level") + } +} + +// Output logger output func +func (l *Logger) Output() io.Writer { + return logrus.StandardLogger().Out +} + +// SetOutput change output, default os.Stdout +func (l *Logger) SetOutput(w io.Writer) { + logrus.SetOutput(w) +} + +// Printj print json log +func (l *Logger) Printj(j log.JSON) { + logrus.WithFields(logrus.Fields(j)).Print() +} + +// Debugj debug json log +func (l *Logger) Debugj(j log.JSON) { + logrus.WithFields(logrus.Fields(j)).Debug() +} + +// Infoj info json log +func (l *Logger) Infoj(j log.JSON) { + logrus.WithFields(logrus.Fields(j)).Info() +} + +// Warnj warning json log +func (l *Logger) Warnj(j log.JSON) { + logrus.WithFields(logrus.Fields(j)).Warn() +} + +// Errorj error json log +func (l *Logger) Errorj(j log.JSON) { + logrus.WithFields(logrus.Fields(j)).Error() +} + +// Fatalj fatal json log +func (l *Logger) Fatalj(j log.JSON) { + logrus.WithFields(logrus.Fields(j)).Fatal() +} + +// Panicj panic json log +func (l *Logger) Panicj(j log.JSON) { + logrus.WithFields(logrus.Fields(j)).Panic() +} + +// Print string log +func (l *Logger) Print(i ...interface{}) { + logrus.Print(i...) +} + +// Debug string log +func (l *Logger) Debug(i ...interface{}) { + logrus.Debug(i...) +} + +// Info string log +func (l *Logger) Info(i ...interface{}) { + logrus.Info(i...) +} + +// Warn string log +func (l *Logger) Warn(i ...interface{}) { + logrus.Warn(i...) +} + +// Error string log +func (l *Logger) Error(i ...interface{}) { + logrus.Error(i...) +} + +// Fatal string log +func (l *Logger) Fatal(i ...interface{}) { + logrus.Fatal(i...) +} + +// Panic string log +func (l *Logger) Panic(i ...interface{}) { + logrus.Panic(i...) +} + +// Printf print json log +func (l *Logger) Printf(format string, args ...interface{}) { + logrus.Printf(format, args...) 
+} + +// Debugf debug json log +func (l *Logger) Debugf(format string, args ...interface{}) { + logrus.Debugf(format, args...) +} + +// Infof info json log +func (l *Logger) Infof(format string, args ...interface{}) { + logrus.Infof(format, args...) +} + +// Warnf warning json log +func (l *Logger) Warnf(format string, args ...interface{}) { + logrus.Warnf(format, args...) +} + +// Errorf error json log +func (l *Logger) Errorf(format string, args ...interface{}) { + logrus.Errorf(format, args...) +} + +// Fatalf fatal json log +func (l *Logger) Fatalf(format string, args ...interface{}) { + logrus.Fatalf(format, args...) +} + +// Panicf panic json log +func (l *Logger) Panicf(format string, args ...interface{}) { + logrus.Panicf(format, args...) +} + +// Hook is a function to process middleware. +func (l *Logger) Hook() echo.MiddlewareFunc { + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + req := c.Request() + res := c.Response() + start := time.Now() + if err := next(c); err != nil { + c.Error(err) + } + stop := time.Now() + + logrus.WithFields(map[string]interface{}{ + "time_rfc3339": time.Now().Format(time.RFC3339), + "remote_ip": c.RealIP(), + "host": req.Host, + "uri": req.RequestURI, + "method": req.Method, + "path": req.URL.Path, + "referer": req.Referer(), + "user_agent": req.UserAgent(), + "status": res.Status, + "latency": stop.Sub(start).Microseconds(), + "latency_human": stop.Sub(start).String(), + "bytes_in": req.ContentLength, + "bytes_out": res.Size, + }).Info("Handled request") + + return nil + } + } +} diff --git a/server/internal/app/file.go b/server/internal/app/file.go new file mode 100644 index 000000000..7bfb7b095 --- /dev/null +++ b/server/internal/app/file.go @@ -0,0 +1,70 @@ +package app + +import ( + "io" + "mime" + "net/http" + "path" + + "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/pkg/id" + 
"github.com/reearth/reearth-backend/pkg/rerror" +) + +func serveFiles( + ec *echo.Echo, + repo gateway.File, +) { + if repo == nil { + return + } + + fileHandler := func(handler func(echo.Context) (io.Reader, string, error)) echo.HandlerFunc { + return func(ctx echo.Context) error { + reader, filename, err := handler(ctx) + if err != nil { + return err + } + ct := "application/octet-stream" + if ext := path.Ext(filename); ext != "" { + ct2 := mime.TypeByExtension(ext) + if ct2 != "" { + ct = ct2 + } + } + return ctx.Stream(http.StatusOK, ct, reader) + } + } + + ec.GET( + "/assets/:filename", + fileHandler(func(ctx echo.Context) (io.Reader, string, error) { + filename := ctx.Param("filename") + r, err := repo.ReadAsset(ctx.Request().Context(), filename) + return r, filename, err + }), + ) + + ec.GET( + "/plugins/:plugin/:filename", + fileHandler(func(ctx echo.Context) (io.Reader, string, error) { + pid, err := id.PluginIDFrom(ctx.Param("plugin")) + if err != nil { + return nil, "", rerror.ErrNotFound + } + filename := ctx.Param("filename") + r, err := repo.ReadPluginFile(ctx.Request().Context(), pid, filename) + return r, filename, err + }), + ) + + ec.GET( + "/published/:name", + fileHandler(func(ctx echo.Context) (io.Reader, string, error) { + name := ctx.Param("name") + r, err := repo.ReadBuiltSceneFile(ctx.Request().Context(), name) + return r, name + ".json", err + }), + ) +} diff --git a/server/internal/app/graphql.go b/server/internal/app/graphql.go new file mode 100644 index 000000000..22835f043 --- /dev/null +++ b/server/internal/app/graphql.go @@ -0,0 +1,63 @@ +package app + +import ( + "context" + + "github.com/99designs/gqlgen/graphql" + "github.com/99designs/gqlgen/graphql/handler" + "github.com/99designs/gqlgen/graphql/handler/extension" + "github.com/99designs/gqlgen/graphql/handler/lru" + "github.com/labstack/echo/v4" + "github.com/ravilushqa/otelgqlgen" + "github.com/reearth/reearth-backend/internal/adapter" + 
"github.com/reearth/reearth-backend/internal/adapter/gql" + "github.com/vektah/gqlparser/v2/gqlerror" +) + +const enableDataLoaders = true + +func GraphqlAPI( + conf GraphQLConfig, + dev bool, +) echo.HandlerFunc { + schema := gql.NewExecutableSchema(gql.Config{ + Resolvers: gql.NewResolver(), + }) + + srv := handler.NewDefaultServer(schema) + srv.Use(otelgqlgen.Middleware()) + + if conf.ComplexityLimit > 0 { + srv.Use(extension.FixedComplexityLimit(conf.ComplexityLimit)) + } + + if dev { + srv.Use(extension.Introspection{}) + } + + srv.Use(extension.AutomaticPersistedQuery{ + Cache: lru.New(30), + }) + + srv.SetErrorPresenter( + // show more detailed error messgage in debug mode + func(ctx context.Context, e error) *gqlerror.Error { + if dev { + return gqlerror.ErrorPathf(graphql.GetFieldContext(ctx).Path(), e.Error()) + } + return graphql.DefaultErrorPresenter(ctx, e) + }, + ) + + return func(c echo.Context) error { + req := c.Request() + ctx := req.Context() + + usecases := adapter.Usecases(ctx) + ctx = gql.AttachUsecases(ctx, usecases, enableDataLoaders) + c.SetRequest(req.WithContext(ctx)) + + srv.ServeHTTP(c.Response(), c.Request()) + return nil + } +} diff --git a/server/internal/app/jwt.go b/server/internal/app/jwt.go new file mode 100644 index 000000000..58c678eed --- /dev/null +++ b/server/internal/app/jwt.go @@ -0,0 +1,139 @@ +package app + +import ( + "context" + "fmt" + "net/url" + "strings" + "time" + + jwtmiddleware "github.com/auth0/go-jwt-middleware/v2" + "github.com/auth0/go-jwt-middleware/v2/jwks" + "github.com/auth0/go-jwt-middleware/v2/validator" + "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/internal/adapter" + "github.com/reearth/reearth-backend/pkg/log" +) + +type contextKey string + +const ( + debugUserHeader = "X-Reearth-Debug-User" + contextUser contextKey = "reearth_user" + defaultJWTTTL = 5 * time.Minute +) + +type customClaims struct { + Name string `json:"name"` + Nickname string `json:"nickname"` + Email string 
`json:"email"` + EmailVerified *bool `json:"email_verified"` +} + +func (c *customClaims) Validate(ctx context.Context) error { + return nil +} + +type MultiValidator []*validator.Validator + +func NewMultiValidator(providers []AuthConfig) (MultiValidator, error) { + validators := make([]*validator.Validator, 0, len(providers)) + for _, p := range providers { + issuerURL, err := url.Parse(p.ISS) + issuerURL.Path = "/" + if err != nil { + return nil, fmt.Errorf("failed to parse the issuer url: %w", err) + } + + var ttl time.Duration + if p.TTL != nil { + ttl = time.Duration(*p.TTL) * time.Minute + } else { + ttl = defaultJWTTTL + } + provider := jwks.NewCachingProvider(issuerURL, ttl) + + alg := "RS256" + if p.ALG != nil && *p.ALG != "" { + alg = *p.ALG + } + algorithm := validator.SignatureAlgorithm(alg) + + var aud []string + if p.AUD != nil { + aud = p.AUD + } else { + aud = []string{} + } + + v, err := validator.New( + provider.KeyFunc, + algorithm, + issuerURL.String(), + aud, + validator.WithCustomClaims(func() validator.CustomClaims { + return &customClaims{} + }), + ) + if err != nil { + return nil, err + } + validators = append(validators, v) + } + return validators, nil +} + +// ValidateToken Trys to validate the token with each validator +// NOTE: the last validation error only is returned +func (mv MultiValidator) ValidateToken(ctx context.Context, tokenString string) (res interface{}, err error) { + for _, v := range mv { + res, err = v.ValidateToken(ctx, tokenString) + if err == nil { + return + } + } + return +} + +// Validate the access token and inject the user clams into ctx +func jwtEchoMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { + jwtValidator, err := NewMultiValidator(cfg.Config.Auths()) + if err != nil { + log.Fatalf("failed to set up the validator: %v", err) + } + + middleware := jwtmiddleware.New(jwtValidator.ValidateToken, jwtmiddleware.WithCredentialsOptional(true)) + + return echo.WrapMiddleware(middleware.CheckJWT) +} + +// load 
claim from ctx and inject the user sub into ctx +func parseJwtMiddleware() echo.MiddlewareFunc { + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + req := c.Request() + ctx := req.Context() + + rawClaims := ctx.Value(jwtmiddleware.ContextKey{}) + if claims, ok := rawClaims.(*validator.ValidatedClaims); ok { + // attach auth info to context + customClaims := claims.CustomClaims.(*customClaims) + name := customClaims.Nickname + if name == "" { + name = customClaims.Name + } + ctx = adapter.AttachAuthInfo(ctx, adapter.AuthInfo{ + Token: strings.TrimPrefix(c.Request().Header.Get("Authorization"), "Bearer "), + Sub: claims.RegisteredClaims.Subject, + Iss: claims.RegisteredClaims.Issuer, + Name: name, + Email: customClaims.Email, + EmailVerified: customClaims.EmailVerified, + }) + } + + c.SetRequest(req.WithContext(ctx)) + return next(c) + } + } +} diff --git a/server/internal/app/main.go b/server/internal/app/main.go new file mode 100644 index 000000000..18415a05e --- /dev/null +++ b/server/internal/app/main.go @@ -0,0 +1,104 @@ +package app + +import ( + "context" + "os" + "os/signal" + + "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/log" +) + +func Start(debug bool, version string) { + log.Infof("reearth-backend %s", version) + + ctx := context.Background() + + // Load config + conf, cerr := ReadConfig(debug) + if cerr != nil { + log.Fatal(cerr) + } + log.Infof("config: %s", conf.Print()) + + // Init profiler + initProfiler(conf.Profiler, version) + + // Init tracer + closer := initTracer(ctx, conf) + defer func() { + if closer != nil { + if err := closer.Close(); err != nil { + log.Errorf("Failed to close tracer: %s\n", err.Error()) + } + } + }() + + // Init repositories + repos, gateways := initReposAndGateways(ctx, conf, debug) + + // Start web server + NewServer(ctx, 
&ServerConfig{ + Config: conf, + Debug: debug, + Repos: repos, + Gateways: gateways, + }).Run() +} + +type WebServer struct { + address string + appServer *echo.Echo +} + +type ServerConfig struct { + Config *Config + Debug bool + Repos *repo.Container + Gateways *gateway.Container +} + +func NewServer(ctx context.Context, cfg *ServerConfig) *WebServer { + port := cfg.Config.Port + if port == "" { + port = "8080" + } + + host := cfg.Config.ServerHost + if host == "" { + if cfg.Debug { + host = "localhost" + } else { + host = "0.0.0.0" + } + } + address := host + ":" + port + + w := &WebServer{ + address: address, + } + + w.appServer = initEcho(ctx, cfg) + return w +} + +func (w *WebServer) Run() { + defer log.Infoln("Server shutdown") + + debugLog := "" + if w.appServer.Debug { + debugLog += " with debug mode" + } + log.Infof("server started%s at http://%s\n", debugLog, w.address) + + go func() { + err := w.appServer.Start(w.address) + log.Fatalln(err.Error()) + }() + + quit := make(chan os.Signal, 1) + signal.Notify(quit, os.Interrupt) + <-quit +} diff --git a/server/internal/app/private.go b/server/internal/app/private.go new file mode 100644 index 000000000..11a731bc4 --- /dev/null +++ b/server/internal/app/private.go @@ -0,0 +1,36 @@ +package app + +import ( + "net/http" + "strings" + + "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/internal/adapter" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +func ExportLayer() echo.HandlerFunc { + return func(c echo.Context) error { + ctx := c.Request().Context() + u := adapter.Usecases(ctx) + + param := c.Param("param") + params := strings.Split(param, ".") + if len(params) != 2 { + return rerror.ErrNotFound + } + + lid, err := id.LayerIDFrom(params[0]) + if err != nil { + return rerror.ErrNotFound + } + + reader, mime, err := u.Layer.Export(ctx, lid, params[1]) + if err != nil { + return err + } + + return c.Stream(http.StatusOK, mime, reader) + 
} +} diff --git a/server/internal/app/profiler.go b/server/internal/app/profiler.go new file mode 100644 index 000000000..6b5bb3d17 --- /dev/null +++ b/server/internal/app/profiler.go @@ -0,0 +1,27 @@ +package app + +import ( + "cloud.google.com/go/profiler" + "github.com/reearth/reearth-backend/pkg/log" +) + +func initProfiler(kind string, version string) { + if kind == "" { + return + } + + if kind == "gcp" { + initGCPProfiler(version) + } + + log.Infof("profiler: %s initialized\n", kind) +} + +func initGCPProfiler(version string) { + if err := profiler.Start(profiler.Config{ + Service: "reearth-backend", + ServiceVersion: version, + }); err != nil { + log.Fatalln(err) + } +} diff --git a/server/internal/app/public.go b/server/internal/app/public.go new file mode 100644 index 000000000..fedca4d12 --- /dev/null +++ b/server/internal/app/public.go @@ -0,0 +1,214 @@ +package app + +import ( + "context" + "crypto/subtle" + "fmt" + "net/http" + "net/url" + + "github.com/labstack/echo/v4" + "github.com/labstack/echo/v4/middleware" + "github.com/reearth/reearth-backend/internal/adapter" + http1 "github.com/reearth/reearth-backend/internal/adapter/http" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +func Ping() echo.HandlerFunc { + return func(c echo.Context) error { + return c.JSON(http.StatusOK, "pong") + } +} + +func Signup() echo.HandlerFunc { + return func(c echo.Context) error { + var inp http1.SignupInput + if err := c.Bind(&inp); err != nil { + return &echo.HTTPError{Code: http.StatusBadRequest, Message: fmt.Errorf("failed to parse request body: %w", err)} + } + + uc := adapter.Usecases(c.Request().Context()) + controller := http1.NewUserController(uc.User) + + output, err := controller.Signup(c.Request().Context(), inp) + if err != nil { + return err + } + + return c.JSON(http.StatusOK, output) + } +} + +func PasswordReset() echo.HandlerFunc { + return func(c echo.Context) error { + var 
inp http1.PasswordResetInput + if err := c.Bind(&inp); err != nil { + return err + } + + uc := adapter.Usecases(c.Request().Context()) + controller := http1.NewUserController(uc.User) + + isStartingNewRequest := len(inp.Email) > 0 && len(inp.Token) == 0 && len(inp.Password) == 0 + isSettingNewPassword := len(inp.Email) > 0 && len(inp.Token) > 0 && len(inp.Password) > 0 + + if isStartingNewRequest { + if err := controller.StartPasswordReset(c.Request().Context(), inp); err != nil { + c.Logger().Error("an attempt to start reset password failed. internal error: %w", err) + } + return c.JSON(http.StatusOK, echo.Map{"message": "If that email address is in our database, we will send you an email to reset your password."}) + } + + if isSettingNewPassword { + if err := controller.PasswordReset(c.Request().Context(), inp); err != nil { + c.Logger().Error("an attempt to Set password failed. internal error: %w", err) + return c.JSON(http.StatusBadRequest, echo.Map{"message": "Bad set password request"}) + } + return c.JSON(http.StatusOK, echo.Map{"message": "Password is updated successfully"}) + } + + return &echo.HTTPError{Code: http.StatusBadRequest, Message: "Bad reset password request"} + } +} + +func StartSignupVerify() echo.HandlerFunc { + return func(c echo.Context) error { + var inp http1.CreateVerificationInput + if err := c.Bind(&inp); err != nil { + return &echo.HTTPError{Code: http.StatusBadRequest, Message: fmt.Errorf("failed to parse request body: %w", err)} + } + + uc := adapter.Usecases(c.Request().Context()) + controller := http1.NewUserController(uc.User) + + if err := controller.CreateVerification(c.Request().Context(), inp); err != nil { + return err + } + + return c.NoContent(http.StatusOK) + } +} + +func SignupVerify() echo.HandlerFunc { + return func(c echo.Context) error { + code := c.Param("code") + if len(code) == 0 { + return echo.ErrBadRequest + } + + uc := adapter.Usecases(c.Request().Context()) + controller := http1.NewUserController(uc.User) + + 
output, err := controller.VerifyUser(c.Request().Context(), code) + if err != nil { + return err + } + + return c.JSON(http.StatusOK, output) + } +} + +func PublishedMetadata() echo.HandlerFunc { + return func(c echo.Context) error { + name := c.Param("name") + if name == "" { + return rerror.ErrNotFound + } + + contr, err := publishedController(c) + if err != nil { + return err + } + + res, err := contr.Metadata(c.Request().Context(), name) + if err != nil { + return err + } + + return c.JSON(http.StatusOK, res) + } +} + +func PublishedData() echo.HandlerFunc { + return func(c echo.Context) error { + name := c.Param("name") + if name == "" { + return rerror.ErrNotFound + } + + contr, err := publishedController(c) + if err != nil { + return err + } + + r, err := contr.Data(c.Request().Context(), name) + if err != nil { + return err + } + + return c.Stream(http.StatusOK, "application/json", r) + } +} + +func PublishedIndex() echo.HandlerFunc { + return func(c echo.Context) error { + contr, err := publishedController(c) + if err != nil { + return err + } + + index, err := contr.Index(c.Request().Context(), c.Param("name"), &url.URL{ + Scheme: "http", + Host: c.Request().Host, + Path: c.Request().URL.Path, + }) + if err != nil { + return err + } + if index == "" { + return rerror.ErrNotFound + } + return c.HTML(http.StatusOK, index) + } +} + +func PublishedAuthMiddleware() echo.MiddlewareFunc { + key := struct{}{} + return middleware.BasicAuthWithConfig(middleware.BasicAuthConfig{ + Validator: func(user string, password string, c echo.Context) (bool, error) { + md, ok := c.Request().Context().Value(key).(interfaces.ProjectPublishedMetadata) + if !ok { + return true, echo.ErrNotFound + } + return !md.IsBasicAuthActive || subtle.ConstantTimeCompare([]byte(user), []byte(md.BasicAuthUsername)) == 1 && subtle.ConstantTimeCompare([]byte(password), []byte(md.BasicAuthPassword)) == 1, nil + }, + Skipper: func(c echo.Context) bool { + name := c.Param("name") + if name == "" { 
+ return true + } + + contr, err := publishedController(c) + if err != nil { + return false + } + + md, err := contr.Metadata(c.Request().Context(), name) + if err != nil { + return true + } + + c.SetRequest(c.Request().WithContext(context.WithValue(c.Request().Context(), key, md))) + return !md.IsBasicAuthActive + }, + }) +} + +func publishedController(c echo.Context) (*http1.PublishedController, error) { + uc := adapter.Usecases(c.Request().Context()) + if uc.Published == nil { + return nil, rerror.ErrNotFound + } + return http1.NewPublishedController(uc.Published), nil +} diff --git a/server/internal/app/public_test.go b/server/internal/app/public_test.go new file mode 100644 index 000000000..f7ff0819c --- /dev/null +++ b/server/internal/app/public_test.go @@ -0,0 +1,239 @@ +package app + +import ( + "context" + "encoding/base64" + "io" + "net/http" + "net/http/httptest" + "net/url" + "strings" + "testing" + + "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/internal/adapter" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/stretchr/testify/assert" +) + +func TestPublishedAuthMiddleware(t *testing.T) { + tests := []struct { + Name string + PublishedName string + BasicAuthUsername string + BasicAuthPassword string + Error error + }{ + { + Name: "empty name", + }, + { + Name: "not found", + PublishedName: "aaa", + }, + { + Name: "no auth", + PublishedName: "inactive", + }, + { + Name: "auth", + PublishedName: "active", + Error: echo.ErrUnauthorized, + }, + { + Name: "auth with invalid credentials", + PublishedName: "active", + BasicAuthUsername: "aaa", + BasicAuthPassword: "bbb", + Error: echo.ErrUnauthorized, + }, + { + Name: "auth with valid credentials", + PublishedName: "active", + BasicAuthUsername: "fooo", + BasicAuthPassword: "baar", + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + assert := 
assert.New(t) + req := httptest.NewRequest(http.MethodGet, "/", nil) + if tc.BasicAuthUsername != "" { + req.Header.Set(echo.HeaderAuthorization, "basic "+base64.StdEncoding.EncodeToString([]byte(tc.BasicAuthUsername+":"+tc.BasicAuthPassword))) + } + res := httptest.NewRecorder() + e := echo.New() + c := e.NewContext(req, res) + c.SetParamNames("name") + c.SetParamValues(tc.PublishedName) + m := mockPublishedUsecaseMiddleware(false) + + err := m(PublishedAuthMiddleware()(func(c echo.Context) error { + return c.String(http.StatusOK, "test") + }))(c) + if tc.Error == nil { + assert.NoError(err) + assert.Equal(http.StatusOK, res.Code) + assert.Equal("test", res.Body.String()) + } else { + assert.ErrorIs(err, tc.Error) + } + }) + } +} + +func TestPublishedData(t *testing.T) { + tests := []struct { + Name string + PublishedName string + Error error + }{ + { + Name: "empty", + Error: rerror.ErrNotFound, + }, + { + Name: "not found", + PublishedName: "pr", + Error: rerror.ErrNotFound, + }, + { + Name: "ok", + PublishedName: "prj", + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + assert := assert.New(t) + req := httptest.NewRequest(http.MethodGet, "/", nil) + res := httptest.NewRecorder() + e := echo.New() + c := e.NewContext(req, res) + c.SetParamNames("name") + c.SetParamValues(tc.PublishedName) + m := mockPublishedUsecaseMiddleware(false) + + err := m(PublishedData())(c) + + if tc.Error == nil { + assert.NoError(err) + assert.Equal(http.StatusOK, res.Code) + assert.Equal("application/json", res.Header().Get(echo.HeaderContentType)) + assert.Equal("aaa", res.Body.String()) + } else { + assert.ErrorIs(err, tc.Error) + } + }) + } +} + +func TestPublishedIndex(t *testing.T) { + tests := []struct { + Name string + PublishedName string + Error error + EmptyIndex bool + }{ + { + Name: "empty", + Error: rerror.ErrNotFound, + }, + { + Name: "empty index", + Error: rerror.ErrNotFound, + EmptyIndex: true, + }, + { + Name: 
"not found", + PublishedName: "pr", + Error: rerror.ErrNotFound, + }, + { + Name: "ok", + PublishedName: "prj", + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + assert := assert.New(t) + req := httptest.NewRequest(http.MethodGet, "/aaa/bbb", nil) + res := httptest.NewRecorder() + e := echo.New() + c := e.NewContext(req, res) + c.SetParamNames("name") + c.SetParamValues(tc.PublishedName) + m := mockPublishedUsecaseMiddleware(tc.EmptyIndex) + + err := m(PublishedIndex())(c) + + if tc.Error == nil { + assert.NoError(err) + assert.Equal(http.StatusOK, res.Code) + assert.Equal("text/html; charset=UTF-8", res.Header().Get(echo.HeaderContentType)) + assert.Equal("index", res.Body.String()) + } else { + assert.ErrorIs(err, tc.Error) + } + }) + } +} + +func mockPublishedUsecaseMiddleware(emptyIndex bool) echo.MiddlewareFunc { + return ContextMiddleware(func(ctx context.Context) context.Context { + return adapter.AttachUsecases(ctx, &interfaces.Container{ + Published: &mockPublished{EmptyIndex: emptyIndex}, + }) + }) +} + +type mockPublished struct { + interfaces.Published + EmptyIndex bool +} + +func (p *mockPublished) Metadata(ctx context.Context, name string) (interfaces.ProjectPublishedMetadata, error) { + if name == "active" { + return interfaces.ProjectPublishedMetadata{ + IsBasicAuthActive: true, + BasicAuthUsername: "fooo", + BasicAuthPassword: "baar", + }, nil + } else if name == "inactive" { + return interfaces.ProjectPublishedMetadata{ + IsBasicAuthActive: false, + BasicAuthUsername: "fooo", + BasicAuthPassword: "baar", + }, nil + } + return interfaces.ProjectPublishedMetadata{}, rerror.ErrNotFound +} + +func (p *mockPublished) Data(ctx context.Context, name string) (io.Reader, error) { + if name == "prj" { + return strings.NewReader("aaa"), nil + } + return nil, rerror.ErrNotFound +} + +func (p *mockPublished) Index(ctx context.Context, name string, url *url.URL) (string, error) { + if p.EmptyIndex { + 
return "", nil + } + if name == "prj" && url.String() == "http://example.com/aaa/bbb" { + return "index", nil + } + return "", rerror.ErrNotFound +} diff --git a/server/internal/app/repo.go b/server/internal/app/repo.go new file mode 100644 index 000000000..d3620e513 --- /dev/null +++ b/server/internal/app/repo.go @@ -0,0 +1,97 @@ +package app + +import ( + "context" + "fmt" + "time" + + "github.com/reearth/reearth-backend/internal/infrastructure/auth0" + "github.com/reearth/reearth-backend/internal/infrastructure/fs" + "github.com/reearth/reearth-backend/internal/infrastructure/gcs" + "github.com/reearth/reearth-backend/internal/infrastructure/google" + "github.com/reearth/reearth-backend/internal/infrastructure/mailer" + "github.com/reearth/reearth-backend/internal/infrastructure/marketplace" + mongorepo "github.com/reearth/reearth-backend/internal/infrastructure/mongo" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/spf13/afero" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" + "go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo" +) + +func initReposAndGateways(ctx context.Context, conf *Config, debug bool) (*repo.Container, *gateway.Container) { + repos := &repo.Container{} + gateways := &gateway.Container{} + + // Mongo + client, err := mongo.Connect( + ctx, + options.Client(). + ApplyURI(conf.DB). + SetConnectTimeout(time.Second*10). 
+ SetMonitor(otelmongo.NewMonitor()), + ) + if err != nil { + log.Fatalf("repo initialization error: %+v\n", err) + } + if err := mongorepo.InitRepos(ctx, repos, client, "reearth"); err != nil { + log.Fatalf("Failed to init mongo: %+v\n", err) + } + + // File + datafs := afero.NewBasePathFs(afero.NewOsFs(), "data") + var fileRepo gateway.File + if conf.GCS.BucketName == "" { + log.Infoln("file: local storage is used") + fileRepo, err = fs.NewFile(datafs, conf.AssetBaseURL) + } else { + log.Infof("file: GCS storage is used: %s\n", conf.GCS.BucketName) + fileRepo, err = gcs.NewFile(conf.GCS.BucketName, conf.AssetBaseURL, conf.GCS.PublicationCacheControl) + if err != nil { + if debug { + log.Warnf("file: failed to init GCS storage: %s\n", err.Error()) + err = nil + } + } + } + if err != nil { + log.Fatalln(fmt.Sprintf("file: init error: %+v", err)) + } + gateways.File = fileRepo + + // Auth0 + gateways.Authenticator = auth0.New(conf.Auth0.Domain, conf.Auth0.ClientID, conf.Auth0.ClientSecret) + + // google + gateways.Google = google.NewGoogle() + + // mailer + gateways.Mailer = initMailer(conf) + + // Marketplace + if conf.Marketplace.Endpoint != "" { + gateways.PluginRegistry = marketplace.New(conf.Marketplace.Endpoint, conf.Marketplace.OAuth.Config()) + } + + // release lock of all scenes + if err := repos.SceneLock.ReleaseAllLock(context.Background()); err != nil { + log.Fatalln(fmt.Sprintf("repo initialization error: %+v", err)) + } + + return repos, gateways +} + +func initMailer(conf *Config) gateway.Mailer { + if conf.Mailer == "sendgrid" { + log.Infoln("mailer: sendgrid is used") + return mailer.NewSendGrid(conf.SendGrid.Name, conf.SendGrid.Email, conf.SendGrid.API) + } + if conf.Mailer == "smtp" { + log.Infoln("mailer: smtp is used") + return mailer.NewSMTP(conf.SMTP.Host, conf.SMTP.Port, conf.SMTP.SMTPUsername, conf.SMTP.Email, conf.SMTP.Password) + } + log.Infoln("mailer: logger is used") + return mailer.NewLogger() +} diff --git 
a/server/internal/app/tracer.go b/server/internal/app/tracer.go new file mode 100644 index 000000000..d7e5f9276 --- /dev/null +++ b/server/internal/app/tracer.go @@ -0,0 +1,68 @@ +package app + +import ( + "context" + "io" + + texporter "github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace" + "github.com/reearth/reearth-backend/pkg/log" + jaeger "github.com/uber/jaeger-client-go" + jaegercfg "github.com/uber/jaeger-client-go/config" + jaegerlog "github.com/uber/jaeger-client-go/log" + "github.com/uber/jaeger-lib/metrics" + "go.opentelemetry.io/otel" + sdktrace "go.opentelemetry.io/otel/sdk/trace" +) + +func initTracer(ctx context.Context, conf *Config) io.Closer { + if conf.Tracer == "gcp" { + initGCPTracer(ctx, conf) + } else if conf.Tracer == "jaeger" { + return initJaegerTracer(conf) + } + return nil +} + +func initGCPTracer(ctx context.Context, conf *Config) { + exporter, err := texporter.New(texporter.WithProjectID(conf.GCPProject)) + if err != nil { + log.Fatalln(err) + } + + tp := sdktrace.NewTracerProvider(sdktrace.WithSyncer(exporter), sdktrace.WithSampler(sdktrace.TraceIDRatioBased(conf.TracerSample))) + defer func() { + _ = tp.ForceFlush(ctx) + }() + + otel.SetTracerProvider(tp) + + log.Infof("tracer: initialized cloud trace with sample fraction: %g", conf.TracerSample) +} + +func initJaegerTracer(conf *Config) io.Closer { + cfg := jaegercfg.Configuration{ + Sampler: &jaegercfg.SamplerConfig{ + Type: jaeger.SamplerTypeConst, + Param: conf.TracerSample, + }, + Reporter: &jaegercfg.ReporterConfig{ + LogSpans: true, + }, + } + + jLogger := jaegerlog.StdLogger + jMetricsFactory := metrics.NullFactory + + closer, err := cfg.InitGlobalTracer( + "Re:Earth", + jaegercfg.Logger(jLogger), + jaegercfg.Metrics(jMetricsFactory), + ) + + if err != nil { + log.Fatalf("Could not initialize jaeger tracer: %s\n", err.Error()) + } + + log.Infof("tracer: initialized jaeger tracer with sample fraction: %g\n", conf.TracerSample) + return closer +} diff 
--git a/server/internal/app/usecase.go b/server/internal/app/usecase.go new file mode 100644 index 000000000..ac3a1800c --- /dev/null +++ b/server/internal/app/usecase.go @@ -0,0 +1,40 @@ +package app + +import ( + "context" + + "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/internal/adapter" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interactor" + "github.com/reearth/reearth-backend/internal/usecase/repo" +) + +func UsecaseMiddleware(r *repo.Container, g *gateway.Container, config interactor.ContainerConfig) echo.MiddlewareFunc { + return ContextMiddleware(func(ctx context.Context) context.Context { + var r2 *repo.Container + if op := adapter.Operator(ctx); op != nil && r != nil { + // apply filters to repos + r2 = r.Filtered( + repo.TeamFilterFromOperator(op), + repo.SceneFilterFromOperator(op), + ) + } else { + r2 = r + } + + uc := interactor.NewContainer(r2, g, config) + ctx = adapter.AttachUsecases(ctx, &uc) + return ctx + }) +} + +func ContextMiddleware(fn func(ctx context.Context) context.Context) echo.MiddlewareFunc { + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + req := c.Request() + c.SetRequest(req.WithContext(fn(req.Context()))) + return next(c) + } + } +} diff --git a/server/internal/app/web.go b/server/internal/app/web.go new file mode 100644 index 000000000..f8d811d33 --- /dev/null +++ b/server/internal/app/web.go @@ -0,0 +1,48 @@ +package app + +import ( + "net/http" + "os" + "strings" + + "github.com/labstack/echo/v4" + "github.com/labstack/echo/v4/middleware" +) + +type WebConfig map[string]string + +func web(e *echo.Echo, wc WebConfig, a []AuthConfig) { + if _, err := os.Stat("web"); err != nil { + return // web won't be delivered + } + + e.Logger.Info("web: web directory will be delivered\n") + + config := map[string]string{} + if len(a) > 0 { + ac := a[0] + if ac.ISS != "" { + config["auth0Domain"] = 
strings.TrimSuffix(ac.ISS, "/") + } + if ac.ClientID != nil { + config["auth0ClientId"] = *ac.ClientID + } + if len(ac.AUD) > 0 { + config["auth0Audience"] = ac.AUD[0] + } + } + for k, v := range wc { + config[k] = v + } + + e.GET("/reearth_config.json", func(c echo.Context) error { + return c.JSON(http.StatusOK, config) + }) + + e.Use(middleware.StaticWithConfig(middleware.StaticConfig{ + Root: "web", + Index: "index.html", + Browse: false, + HTML5: true, + })) +} diff --git a/server/internal/infrastructure/adapter/plugin.go b/server/internal/infrastructure/adapter/plugin.go new file mode 100644 index 000000000..195267281 --- /dev/null +++ b/server/internal/infrastructure/adapter/plugin.go @@ -0,0 +1,77 @@ +package adapter + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +// TODO: ใ“ใ“ใงๅน…ๅ„ชๅ…ˆๆŽข็ดขใ—ใฆใ„ใใ‚ขใƒซใ‚ดใƒชใ‚บใƒ ใ‚’ๆ›ธใ„ใฆmongoใ‹ใ‚‰ใƒ“ใƒซใƒˆใ‚คใƒณใฎๆคœ็ดขใƒญใ‚ธใƒƒใ‚ฏใ‚’้™คๅŽปใ™ใ‚‹ +type pluginRepo struct { + readers []repo.Plugin + writer repo.Plugin +} + +// NewPlugin generates a new repository which has fallback repositories to be used when the plugin is not found +func NewPlugin(readers []repo.Plugin, writer repo.Plugin) repo.Plugin { + return &pluginRepo{ + readers: append([]repo.Plugin{}, readers...), + writer: writer, + } +} + +func (r *pluginRepo) Filtered(f repo.SceneFilter) repo.Plugin { + readers := make([]repo.Plugin, 0, len(r.readers)) + for _, r := range r.readers { + readers = append(readers, r.Filtered(f)) + } + return &pluginRepo{ + readers: readers, + writer: r.writer.Filtered(f), + } +} + +func (r *pluginRepo) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, error) { + for _, re := range r.readers { + if res, err := re.FindByID(ctx, id); err != nil { + if errors.Is(err, rerror.ErrNotFound) { + continue + } 
else { + return nil, err + } + } else { + return res, nil + } + } + return nil, rerror.ErrNotFound +} + +func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { + results := make([]*plugin.Plugin, 0, len(ids)) + for _, id := range ids { + res, err := r.FindByID(ctx, id) + if err != nil && err != rerror.ErrNotFound { + return nil, err + } + results = append(results, res) + } + return results, nil +} + +func (r *pluginRepo) Save(ctx context.Context, p *plugin.Plugin) error { + if r.writer == nil { + return errors.New("cannot write") + } + return r.writer.Save(ctx, p) +} + +func (r *pluginRepo) Remove(ctx context.Context, p id.PluginID) error { + if r.writer == nil { + return errors.New("cannot write") + } + return r.writer.Remove(ctx, p) +} diff --git a/server/internal/infrastructure/adapter/property_schema.go b/server/internal/infrastructure/adapter/property_schema.go new file mode 100644 index 000000000..4576067d7 --- /dev/null +++ b/server/internal/infrastructure/adapter/property_schema.go @@ -0,0 +1,91 @@ +package adapter + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +// TODO: ใ“ใ“ใงๅน…ๅ„ชๅ…ˆๆŽข็ดขใ—ใฆใ„ใใ‚ขใƒซใ‚ดใƒชใ‚บใƒ ใ‚’ๆ›ธใ„ใฆmongoใ‹ใ‚‰ใƒ“ใƒซใƒˆใ‚คใƒณใฎๆคœ็ดขใƒญใ‚ธใƒƒใ‚ฏใ‚’้™คๅŽปใ™ใ‚‹ +type propertySchema struct { + readers []repo.PropertySchema + writer repo.PropertySchema +} + +// NewPropertySchema generates a new repository which has fallback repositories to be used when the property schema is not found +func NewPropertySchema(readers []repo.PropertySchema, writer repo.PropertySchema) repo.PropertySchema { + return &propertySchema{ + readers: append([]repo.PropertySchema{}, readers...), + writer: writer, + } +} + +func (r *propertySchema) Filtered(f repo.SceneFilter) repo.PropertySchema { + readers 
:= make([]repo.PropertySchema, 0, len(r.readers)) + for _, r := range r.readers { + readers = append(readers, r.Filtered(f)) + } + return &propertySchema{ + readers: readers, + writer: r.writer.Filtered(f), + } +} + +func (r *propertySchema) FindByID(ctx context.Context, id id.PropertySchemaID) (*property.Schema, error) { + for _, re := range r.readers { + if res, err := re.FindByID(ctx, id); err != nil { + if errors.Is(err, rerror.ErrNotFound) { + continue + } else { + return nil, err + } + } else { + return res, nil + } + } + return nil, rerror.ErrNotFound +} + +func (r *propertySchema) FindByIDs(ctx context.Context, ids []id.PropertySchemaID) (property.SchemaList, error) { + results := make(property.SchemaList, 0, len(ids)) + for _, id := range ids { + res, err := r.FindByID(ctx, id) + if err != nil && err != rerror.ErrNotFound { + return nil, err + } + results = append(results, res) + } + return results, nil +} + +func (r *propertySchema) Save(ctx context.Context, p *property.Schema) error { + if r.writer == nil { + return rerror.ErrInternalBy(errors.New("writer is not set")) + } + return r.writer.Save(ctx, p) +} + +func (r *propertySchema) SaveAll(ctx context.Context, p property.SchemaList) error { + if r.writer == nil { + return rerror.ErrInternalBy(errors.New("writer is not set")) + } + return r.writer.SaveAll(ctx, p) +} + +func (r *propertySchema) Remove(ctx context.Context, p id.PropertySchemaID) error { + if r.writer == nil { + return rerror.ErrInternalBy(errors.New("writer is not set")) + } + return r.writer.Remove(ctx, p) +} + +func (r *propertySchema) RemoveAll(ctx context.Context, p []id.PropertySchemaID) error { + if r.writer == nil { + return rerror.ErrInternalBy(errors.New("writer is not set")) + } + return r.writer.RemoveAll(ctx, p) +} diff --git a/server/internal/infrastructure/auth0/authenticator.go b/server/internal/infrastructure/auth0/authenticator.go new file mode 100644 index 000000000..2d825779b --- /dev/null +++ 
b/server/internal/infrastructure/auth0/authenticator.go @@ -0,0 +1,169 @@ +package auth0 + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "io" + "net/http" + "net/url" + "strings" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" + "golang.org/x/oauth2" + "golang.org/x/oauth2/clientcredentials" +) + +type Auth0 struct { + base string + client *http.Client + disableLogging bool +} + +type response struct { + ID string `json:"user_id"` + Name string `json:"name"` + UserName string `json:"username"` + Email string `json:"email"` + EmailVerified bool `json:"email_verified"` + Message string `json:"message"` + ErrorDescription string `json:"error_description"` +} + +func (u response) Into() gateway.AuthenticatorUser { + name := u.UserName + if name == "" { + name = u.Name + } + + return gateway.AuthenticatorUser{ + ID: u.ID, + Name: name, + Email: u.Email, + EmailVerified: u.EmailVerified, + } +} + +func (u response) Error() string { + if u.ErrorDescription != "" { + return u.ErrorDescription + } + return u.Message +} + +func New(domain, clientID, clientSecret string) *Auth0 { + base := urlFromDomain(domain) + conf := clientcredentials.Config{ + ClientID: clientID, + ClientSecret: clientSecret, + TokenURL: base + "oauth/token", + Scopes: []string{"read:users", "update:users"}, + AuthStyle: oauth2.AuthStyleInParams, + EndpointParams: url.Values{ + "audience": []string{base + "api/v2/"}, + }, + } + return &Auth0{ + base: base, + client: conf.Client(context.Background()), + } +} + +func (a *Auth0) UpdateUser(p gateway.AuthenticatorUpdateUserParam) (data gateway.AuthenticatorUser, err error) { + if err != nil { + return + } + + payload := map[string]string{} + if p.Name != nil { + payload["name"] = *p.Name + } + if p.Email != nil { + payload["email"] = *p.Email + } + if p.Password != nil { + payload["password"] = *p.Password + } + if len(payload) 
== 0 { + err = errors.New("nothing is updated") + return + } + + var r response + r, err = a.exec(http.MethodPatch, "api/v2/users/"+p.ID, payload) + if err != nil { + err = rerror.ErrInternalByWith("failed to update user", err) + return + } + + data = r.Into() + return +} + +func (a *Auth0) exec(method, path string, b any) (r response, err error) { + if a == nil || a.base == "" { + err = errors.New("auth0: domain is not set") + return + } + + var body io.Reader = nil + if b != nil { + if b2, ok := b.([]byte); ok { + body = bytes.NewReader(b2) + } else { + var b2 []byte + b2, err = json.Marshal(b) + if err != nil { + return + } + body = bytes.NewReader(b2) + } + } + + var req *http.Request + req, err = http.NewRequest(method, a.base+path, body) + if err != nil { + return + } + req.Header.Set("Content-Type", "application/json") + resp, err := a.client.Do(req) + if err != nil { + return + } + + defer func() { + _ = resp.Body.Close() + }() + + respb, err := io.ReadAll(resp.Body) + if err != nil { + return + } + + if !a.disableLogging { + log.Infof("auth0: path: %s, status: %d, resp: %s", path, resp.StatusCode, respb) + } + + if err = json.Unmarshal(respb, &r); err != nil { + return + } + + if resp.StatusCode >= 300 { + err = errors.New(r.Error()) + return + } + return +} + +func urlFromDomain(path string) string { + if path == "" { + return path + } + if !strings.HasPrefix(path, "http://") && !strings.HasPrefix(path, "https://") { + path = "https://" + path + } + return strings.TrimSuffix(path, "/") + "/" +} diff --git a/server/internal/infrastructure/auth0/authenticator_test.go b/server/internal/infrastructure/auth0/authenticator_test.go new file mode 100644 index 000000000..4bbb35680 --- /dev/null +++ b/server/internal/infrastructure/auth0/authenticator_test.go @@ -0,0 +1,107 @@ +package auth0 + +import ( + "encoding/json" + "net/http" + "strings" + "testing" + + "github.com/jarcoal/httpmock" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + 
"github.com/stretchr/testify/assert" +) + +const ( + token = "a" + clientID = "clientclient" + clientSecret = "secretsecret" + domain = "https://reearth-dev.auth0.com/" + userID = "x" + expiresIn = 24 * 60 * 60 + userName = "d" + userEmail = "e" +) + +func TestAuth0(t *testing.T) { + a := New(domain, clientID, clientSecret) + a.disableLogging = true + + httpmock.Activate() + defer httpmock.Deactivate() + + httpmock.RegisterResponder("POST", domain+"oauth/token", func(req *http.Request) (*http.Response, error) { + _ = req.ParseForm() + assert.Equal(t, domain+"api/v2/", req.Form.Get("audience")) + assert.Equal(t, "client_credentials", req.Form.Get("grant_type")) + assert.Equal(t, "read:users update:users", req.Form.Get("scope")) + assert.Equal(t, clientID, req.Form.Get("client_id")) + assert.Equal(t, clientSecret, req.Form.Get("client_secret")) + return httpmock.NewJsonResponse(http.StatusOK, map[string]any{ + "access_token": token, + "token_type": "Bearer", + "scope": "read:users update:users", + "expires_in": expiresIn, + }) + }) + + httpmock.RegisterResponder("GET", domain+"api/v2/users/"+userID, func(req *http.Request) (*http.Response, error) { + if token != strings.TrimPrefix(req.Header.Get("Authorization"), "Bearer ") { + return httpmock.NewJsonResponse(http.StatusOK, map[string]any{ + "message": "Unauthorized", + }) + } + + return httpmock.NewJsonResponse(http.StatusOK, map[string]any{ + "user_id": userID, + "username": userName, + "email": userEmail, + "email_verified": true, + }) + }) + + httpmock.RegisterResponder("PATCH", domain+"api/v2/users/"+userID, func(req *http.Request) (*http.Response, error) { + if token != strings.TrimPrefix(req.Header.Get("Authorization"), "Bearer ") { + return httpmock.NewJsonResponse(http.StatusOK, map[string]any{ + "message": "Unauthorized", + }) + } + + var body map[string]string + _ = json.NewDecoder(req.Body).Decode(&body) + + resEmail := body["email"] + if resEmail == "" { + resEmail = userEmail + } + + resUsername := 
body["username"] + if resUsername == "" { + resUsername = userName + } + + return httpmock.NewJsonResponse(http.StatusOK, map[string]any{ + "user_id": userID, + "username": resUsername, + "email": resEmail, + "email_verified": true, + }) + }) + + newEmail := "xxxxx" + r, err := a.UpdateUser(gateway.AuthenticatorUpdateUserParam{ + ID: userID, + Email: &newEmail, + }) + assert.NoError(t, err) + assert.Equal(t, gateway.AuthenticatorUser{ + ID: userID, + Email: newEmail, + EmailVerified: true, + Name: userName, + }, r) +} + +func TestURLFromDomain(t *testing.T) { + assert.Equal(t, "https://a/", urlFromDomain("a")) + assert.Equal(t, "https://a/", urlFromDomain("a/")) +} diff --git a/server/internal/infrastructure/fs/common.go b/server/internal/infrastructure/fs/common.go new file mode 100644 index 000000000..0eb93b450 --- /dev/null +++ b/server/internal/infrastructure/fs/common.go @@ -0,0 +1,8 @@ +package fs + +const ( + assetDir = "assets" + pluginDir = "plugins" + publishedDir = "published" + manifestFilePath = "reearth.yml" +) diff --git a/server/internal/infrastructure/fs/file.go b/server/internal/infrastructure/fs/file.go new file mode 100644 index 000000000..6ea331fff --- /dev/null +++ b/server/internal/infrastructure/fs/file.go @@ -0,0 +1,192 @@ +package fs + +import ( + "context" + "errors" + "io" + "net/url" + "os" + "path" + "path/filepath" + + "github.com/kennygrant/sanitize" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/spf13/afero" +) + +type fileRepo struct { + fs afero.Fs + urlBase *url.URL +} + +func NewFile(fs afero.Fs, urlBase string) (gateway.File, error) { + var b *url.URL + var err error + b, err = url.Parse(urlBase) + if err != nil { + return nil, errors.New("invalid base URL") + } + + return &fileRepo{ + fs: fs, + urlBase: b, + }, nil +} + +// asset + +func (f *fileRepo) 
ReadAsset(ctx context.Context, filename string) (io.ReadCloser, error) { + return f.read(ctx, filepath.Join(assetDir, sanitize.Path(filename))) +} + +func (f *fileRepo) UploadAsset(ctx context.Context, file *file.File) (*url.URL, error) { + filename := sanitize.Path(newAssetID() + path.Ext(file.Path)) + if err := f.upload(ctx, filepath.Join(assetDir, filename), file.Content); err != nil { + return nil, err + } + return getAssetFileURL(f.urlBase, filename), nil +} + +func (f *fileRepo) RemoveAsset(ctx context.Context, u *url.URL) error { + if u == nil { + return nil + } + p := sanitize.Path(u.Path) + if p == "" || f.urlBase == nil || u.Scheme != f.urlBase.Scheme || u.Host != f.urlBase.Host || path.Dir(p) != f.urlBase.Path { + return gateway.ErrInvalidFile + } + return f.delete(ctx, filepath.Join(assetDir, path.Base(p))) +} + +// plugin + +func (f *fileRepo) ReadPluginFile(ctx context.Context, pid id.PluginID, filename string) (io.ReadCloser, error) { + return f.read(ctx, filepath.Join(pluginDir, pid.String(), sanitize.Path(filename))) +} + +func (f *fileRepo) UploadPluginFile(ctx context.Context, pid id.PluginID, file *file.File) error { + return f.upload(ctx, filepath.Join(pluginDir, pid.String(), sanitize.Path(file.Path)), file.Content) +} + +func (f *fileRepo) RemovePlugin(ctx context.Context, pid id.PluginID) error { + return f.delete(ctx, filepath.Join(pluginDir, pid.String())) +} + +// built scene + +func (f *fileRepo) ReadBuiltSceneFile(ctx context.Context, name string) (io.ReadCloser, error) { + return f.read(ctx, filepath.Join(publishedDir, sanitize.Path(name+".json"))) +} + +func (f *fileRepo) UploadBuiltScene(ctx context.Context, reader io.Reader, name string) error { + return f.upload(ctx, filepath.Join(publishedDir, sanitize.Path(name+".json")), reader) +} + +func (f *fileRepo) MoveBuiltScene(ctx context.Context, oldName, name string) error { + return f.move( + ctx, + filepath.Join(publishedDir, sanitize.Path(oldName+".json")), + 
filepath.Join(publishedDir, sanitize.Path(name+".json")), + ) +} + +func (f *fileRepo) RemoveBuiltScene(ctx context.Context, name string) error { + return f.delete(ctx, filepath.Join(publishedDir, sanitize.Path(name+".json"))) +} + +// helpers + +func (f *fileRepo) read(ctx context.Context, filename string) (io.ReadCloser, error) { + if filename == "" { + return nil, rerror.ErrNotFound + } + + file, err := f.fs.Open(filename) + if err != nil { + if os.IsNotExist(err) { + return nil, rerror.ErrNotFound + } + return nil, rerror.ErrInternalBy(err) + } + return file, nil +} + +func (f *fileRepo) upload(ctx context.Context, filename string, content io.Reader) error { + if filename == "" { + return gateway.ErrFailedToUploadFile + } + + if fnd := path.Dir(filename); fnd != "" { + if err := f.fs.MkdirAll(fnd, 0755); err != nil { + return rerror.ErrInternalBy(err) + } + } + + dest, err := f.fs.Create(filename) + if err != nil { + return rerror.ErrInternalBy(err) + } + defer func() { + _ = dest.Close() + }() + + if _, err := io.Copy(dest, content); err != nil { + return gateway.ErrFailedToUploadFile + } + + return nil +} + +func (f *fileRepo) move(ctx context.Context, from, dest string) error { + if from == "" || dest == "" || from == dest { + return gateway.ErrInvalidFile + } + + if destd := path.Dir(dest); destd != "" { + if err := f.fs.MkdirAll(destd, 0755); err != nil { + return rerror.ErrInternalBy(err) + } + } + + if err := f.fs.Rename(from, dest); err != nil { + if os.IsNotExist(err) { + return rerror.ErrNotFound + } + return rerror.ErrInternalBy(err) + } + + return nil +} + +func (f *fileRepo) delete(ctx context.Context, filename string) error { + if filename == "" { + return gateway.ErrFailedToUploadFile + } + + if err := f.fs.RemoveAll(filename); err != nil { + if os.IsNotExist(err) { + return nil + } + return rerror.ErrInternalBy(err) + } + return nil +} + +func getAssetFileURL(base *url.URL, filename string) *url.URL { + if base == nil { + return nil + } + + // 
https://github.com/golang/go/issues/38351 + b := *base + b.Path = path.Join(b.Path, filename) + return &b +} + +func newAssetID() string { + // TODO: replace + return id.NewAssetID().String() +} diff --git a/server/internal/infrastructure/fs/file_test.go b/server/internal/infrastructure/fs/file_test.go new file mode 100644 index 000000000..065a8bfa9 --- /dev/null +++ b/server/internal/infrastructure/fs/file_test.go @@ -0,0 +1,265 @@ +package fs + +import ( + "context" + "io" + "net/url" + "os" + "path" + "path/filepath" + "strings" + "testing" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" +) + +func TestNewFile(t *testing.T) { + f, err := NewFile(mockFs(), "") + assert.NoError(t, err) + assert.NotNil(t, f) +} + +func TestFile_ReadAsset(t *testing.T) { + f, _ := NewFile(mockFs(), "") + + r, err := f.ReadAsset(context.Background(), "xxx.txt") + assert.NoError(t, err) + c, err := io.ReadAll(r) + assert.NoError(t, err) + assert.Equal(t, "hello", string(c)) + assert.NoError(t, r.Close()) + + r, err = f.ReadAsset(context.Background(), "aaa.txt") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) + + r, err = f.ReadAsset(context.Background(), "../published/s.json") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) +} + +func TestFile_UploadAsset(t *testing.T) { + fs := mockFs() + f, _ := NewFile(fs, "https://example.com/assets") + + u, err := f.UploadAsset(context.Background(), &file.File{ + Path: "aaa.txt", + Content: io.NopCloser(strings.NewReader("aaa")), + }) + assert.NoError(t, err) + assert.Equal(t, "https", u.Scheme) + assert.Equal(t, "example.com", u.Host) + assert.True(t, strings.HasPrefix(u.Path, "/assets/")) + assert.Equal(t, ".txt", path.Ext(u.Path)) + + uf, _ := fs.Open(filepath.Join("assets", 
path.Base(u.Path))) + c, _ := io.ReadAll(uf) + assert.Equal(t, "aaa", string(c)) +} + +func TestFile_RemoveAsset(t *testing.T) { + cases := []struct { + Name string + URL string + Deleted bool + Err error + }{ + { + Name: "deleted", + URL: "https://example.com/assets/xxx.txt", + Deleted: true, + }, + { + Name: "not deleted 1", + URL: "https://example.com/assets/aaa.txt", + Err: nil, + }, + { + Name: "not deleted 2", + URL: "https://example.com/plugins/xxx.txt", + Err: gateway.ErrInvalidFile, + }, + } + + for _, tc := range cases { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + fs := mockFs() + f, _ := NewFile(fs, "https://example.com/assets") + + u, _ := url.Parse(tc.URL) + err := f.RemoveAsset(context.Background(), u) + + if tc.Err == nil { + assert.NoError(t, err) + } else { + assert.Same(t, tc.Err, err) + } + + _, err = fs.Stat(filepath.Join("assets", "xxx.txt")) + if tc.Deleted { + assert.ErrorIs(t, err, os.ErrNotExist) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestFile_ReadPluginFile(t *testing.T) { + f, _ := NewFile(mockFs(), "") + + r, err := f.ReadPluginFile(context.Background(), id.MustPluginID("aaa~1.0.0"), "foo.js") + assert.NoError(t, err) + c, err := io.ReadAll(r) + assert.NoError(t, err) + assert.Equal(t, "bar", string(c)) + assert.NoError(t, r.Close()) + + r, err = f.ReadPluginFile(context.Background(), id.MustPluginID("aaa~1.0.0"), "aaa.txt") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) + + r, err = f.ReadPluginFile(context.Background(), id.MustPluginID("aaa~1.0.1"), "foo.js") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) + + r, err = f.ReadPluginFile(context.Background(), id.MustPluginID("aaa~1.0.1"), "../../assets/xxx.txt") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) +} + +func TestFile_UploadPluginFile(t *testing.T) { + fs := mockFs() + f, _ := NewFile(fs, "") + + err := f.UploadPluginFile(context.Background(), id.MustPluginID("aaa~1.0.1"), &file.File{ 
+ Path: "aaa.js", + Content: io.NopCloser(strings.NewReader("aaa")), + }) + assert.NoError(t, err) + + uf, _ := fs.Open(filepath.Join("plugins", "aaa~1.0.1", "aaa.js")) + c, _ := io.ReadAll(uf) + assert.Equal(t, "aaa", string(c)) +} + +func TestFile_RemovePluginFile(t *testing.T) { + fs := mockFs() + f, _ := NewFile(fs, "") + + err := f.RemovePlugin(context.Background(), id.MustPluginID("aaa~1.0.1")) + assert.NoError(t, err) + + _, err = fs.Stat(filepath.Join("plugins", "aaa~1.0.0")) + assert.NoError(t, err) + + err = f.RemovePlugin(context.Background(), id.MustPluginID("aaa~1.0.0")) + assert.NoError(t, err) + + _, err = fs.Stat(filepath.Join("plugins", "aaa~1.0.0")) + assert.ErrorIs(t, err, os.ErrNotExist) +} + +func TestFile_ReadBuiltSceneFile(t *testing.T) { + f, _ := NewFile(mockFs(), "") + + r, err := f.ReadBuiltSceneFile(context.Background(), "s") + assert.NoError(t, err) + c, err := io.ReadAll(r) + assert.NoError(t, err) + assert.Equal(t, "{}", string(c)) + assert.NoError(t, r.Close()) + + r, err = f.ReadBuiltSceneFile(context.Background(), "a") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) + + r, err = f.ReadBuiltSceneFile(context.Background(), "../assets/xxx.txt") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) +} + +func TestFile_UploadBuiltScene(t *testing.T) { + fs := mockFs() + f, _ := NewFile(fs, "") + + err := f.UploadBuiltScene(context.Background(), io.NopCloser(strings.NewReader("{\"aaa\":1}")), "a") + assert.NoError(t, err) + + uf, _ := fs.Open(filepath.Join("published", "a.json")) + c, _ := io.ReadAll(uf) + assert.Equal(t, "{\"aaa\":1}", string(c)) +} + +func TestFile_MoveBuiltScene(t *testing.T) { + fs := mockFs() + f, _ := NewFile(fs, "") + + uf, _ := fs.Open(filepath.Join("published", "s.json")) + c, _ := io.ReadAll(uf) + assert.Equal(t, "{}", string(c)) + + uf, err := fs.Open(filepath.Join("published", "a.json")) + assert.ErrorIs(t, err, os.ErrNotExist) + assert.Nil(t, uf) + + err = 
f.MoveBuiltScene(context.Background(), "s", "a") + assert.NoError(t, err) + + uf, err = fs.Open(filepath.Join("published", "s.json")) + assert.ErrorIs(t, err, os.ErrNotExist) + assert.Nil(t, uf) + + uf, _ = fs.Open(filepath.Join("published", "a.json")) + c, _ = io.ReadAll(uf) + assert.Equal(t, "{}", string(c)) +} + +func TestFile_RemoveBuiltScene(t *testing.T) { + fs := mockFs() + f, _ := NewFile(fs, "") + + err := f.RemoveBuiltScene(context.Background(), "a") + assert.NoError(t, err) + + _, err = fs.Stat(filepath.Join("published", "s.json")) + assert.NoError(t, err) + + err = f.RemoveBuiltScene(context.Background(), "s") + assert.NoError(t, err) + + _, err = fs.Stat(filepath.Join("published", "s.json")) + assert.ErrorIs(t, err, os.ErrNotExist) +} + +func TestGetAssetFileURL(t *testing.T) { + e, err := url.Parse("http://hoge.com/assets/xxx.yyy") + assert.NoError(t, err) + b, err := url.Parse("http://hoge.com/assets") + assert.NoError(t, err) + assert.Equal(t, e, getAssetFileURL(b, "xxx.yyy")) +} + +func mockFs() afero.Fs { + files := map[string]string{ + "assets/xxx.txt": "hello", + "plugins/aaa~1.0.0/foo.js": "bar", + "published/s.json": "{}", + } + + fs := afero.NewMemMapFs() + for name, content := range files { + f, _ := fs.Create(name) + _, _ = f.WriteString(content) + _ = f.Close() + } + return fs +} diff --git a/server/internal/infrastructure/fs/plugin.go b/server/internal/infrastructure/fs/plugin.go new file mode 100644 index 000000000..e19501088 --- /dev/null +++ b/server/internal/infrastructure/fs/plugin.go @@ -0,0 +1,124 @@ +package fs + +import ( + "context" + "errors" + "path/filepath" + "regexp" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/plugin/manifest" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/spf13/afero" +) + +type pluginRepo struct { + fs afero.Fs + f repo.SceneFilter +} 
+ +func NewPlugin(fs afero.Fs) repo.Plugin { + return &pluginRepo{ + fs: fs, + } +} + +func (r *pluginRepo) Filtered(f repo.SceneFilter) repo.Plugin { + return &pluginRepo{ + fs: r.fs, + f: r.f.Merge(f), + } +} + +func (r *pluginRepo) FindByID(ctx context.Context, pid id.PluginID) (*plugin.Plugin, error) { + m, err := readPluginManifest(r.fs, pid) + if err != nil { + return nil, err + } + + if s := m.Plugin.ID().Scene(); s != nil && !r.f.CanRead(*s) { + return nil, nil + } + + return m.Plugin, nil +} + +func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { + results := make([]*plugin.Plugin, 0, len(ids)) + for _, id := range ids { + res, err := r.FindByID(ctx, id) + if err != nil { + return nil, err + } + results = append(results, res) + } + return results, nil +} + +func (r *pluginRepo) Save(ctx context.Context, p *plugin.Plugin) error { + return rerror.ErrInternalBy(errors.New("read only")) +} + +func (r *pluginRepo) Remove(ctx context.Context, pid id.PluginID) error { + return rerror.ErrInternalBy(errors.New("read only")) +} + +var translationFileNameRegexp = regexp.MustCompile(`reearth_([a-zA-Z]+(?:-[a-zA-Z]+)?).yml`) + +func readPluginManifest(fs afero.Fs, pid id.PluginID) (*manifest.Manifest, error) { + base := filepath.Join(pluginDir, pid.String()) + translationMap, err := readPluginTranslation(fs, base) + if err != nil { + return nil, err + } + + f, err := fs.Open(filepath.Join(base, manifestFilePath)) + if err != nil { + return nil, rerror.ErrInternalBy(err) + } + defer func() { + _ = f.Close() + }() + + m, err := manifest.Parse(f, nil, translationMap.TranslatedRef()) + if err != nil { + return nil, err + } + + return m, nil +} + +func readPluginTranslation(fs afero.Fs, base string) (manifest.TranslationMap, error) { + d, err := afero.ReadDir(fs, base) + if err != nil { + return nil, rerror.ErrInternalBy(err) + } + + translationMap := manifest.TranslationMap{} + for _, e := range d { + if e.IsDir() { + continue 
+ } + name := e.Name() + lang := translationFileNameRegexp.FindStringSubmatch(name) + if len(lang) == 0 { + continue + } + langfile, err := fs.Open(filepath.Join(base, name)) + if err != nil { + return nil, rerror.ErrInternalBy(err) + } + defer func() { + _ = langfile.Close() + }() + t, err := manifest.ParseTranslation(langfile) + if err != nil { + return nil, err + } + translationMap[lang[1]] = t + } + + return translationMap, nil +} diff --git a/server/internal/infrastructure/fs/plugin_test.go b/server/internal/infrastructure/fs/plugin_test.go new file mode 100644 index 000000000..b15296921 --- /dev/null +++ b/server/internal/infrastructure/fs/plugin_test.go @@ -0,0 +1,39 @@ +package fs + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" +) + +func TestPlugin(t *testing.T) { + ctx := context.Background() + fs := NewPlugin(mockPluginFS()) + p, err := fs.FindByID(ctx, plugin.MustID("testplugin~1.0.0")) + assert.NoError(t, err) + assert.Equal(t, plugin.New().ID(plugin.MustID("testplugin~1.0.0")).Name(i18n.String{ + "en": "testplugin", + "ja": "ใƒ†ใ‚นใƒˆใƒ—ใƒฉใ‚ฐใ‚คใƒณ", + "zh-CN": "ๆต‹่ฏ•ๆ’ไปถ", + }).MustBuild(), p) +} + +func mockPluginFS() afero.Fs { + files := map[string]string{ + "plugins/testplugin~1.0.0/reearth.yml": `{ "id": "testplugin", "version": "1.0.0", "name": "testplugin" }`, + "plugins/testplugin~1.0.0/reearth_ja.yml": `{ "name": "ใƒ†ใ‚นใƒˆใƒ—ใƒฉใ‚ฐใ‚คใƒณ" }`, + "plugins/testplugin~1.0.0/reearth_zh-CN.yml": `{ "name": "ๆต‹่ฏ•ๆ’ไปถ" }`, + } + + fs := afero.NewMemMapFs() + for name, content := range files { + f, _ := fs.Create(name) + _, _ = f.WriteString(content) + _ = f.Close() + } + return fs +} diff --git a/server/internal/infrastructure/fs/property_schema.go b/server/internal/infrastructure/fs/property_schema.go new file mode 100644 index 000000000..ffb0b9619 --- /dev/null +++ 
b/server/internal/infrastructure/fs/property_schema.go @@ -0,0 +1,82 @@ +package fs + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/spf13/afero" +) + +type propertySchema struct { + fs afero.Fs + f repo.SceneFilter +} + +func NewPropertySchema(fs afero.Fs) repo.PropertySchema { + return &propertySchema{ + fs: fs, + } +} + +func (r *propertySchema) Filtered(f repo.SceneFilter) repo.PropertySchema { + return &propertySchema{ + fs: r.fs, + f: r.f.Merge(f), + } +} + +func (r *propertySchema) FindByID(ctx context.Context, i id.PropertySchemaID) (*property.Schema, error) { + m, err := readPluginManifest(r.fs, i.Plugin()) + if err != nil { + return nil, err + } + + if m.Schema != nil && m.Schema.ID() == i { + return m.Schema, nil + } + + for _, ps := range m.ExtensionSchema { + if ps == nil { + continue + } + if ps.ID().Equal(i) { + if s := ps.Scene(); s == nil || r.f.CanRead(*s) { + return ps, nil + } + } + } + + return nil, rerror.ErrNotFound +} + +func (r *propertySchema) FindByIDs(ctx context.Context, ids []id.PropertySchemaID) (property.SchemaList, error) { + results := make(property.SchemaList, 0, len(ids)) + for _, id := range ids { + res, err := r.FindByID(ctx, id) + if err != nil { + return nil, err + } + results = append(results, res) + } + return results, nil +} + +func (r *propertySchema) Save(ctx context.Context, p *property.Schema) error { + return rerror.ErrInternalBy(errors.New("read only")) +} + +func (r *propertySchema) SaveAll(ctx context.Context, p property.SchemaList) error { + return rerror.ErrInternalBy(errors.New("read only")) +} + +func (r *propertySchema) Remove(ctx context.Context, pid id.PropertySchemaID) error { + return rerror.ErrInternalBy(errors.New("read only")) +} + +func (r *propertySchema) RemoveAll(ctx context.Context, pid 
[]id.PropertySchemaID) error { + return rerror.ErrInternalBy(errors.New("read only")) +} diff --git a/server/internal/infrastructure/gcs/file.go b/server/internal/infrastructure/gcs/file.go new file mode 100644 index 000000000..154fde67a --- /dev/null +++ b/server/internal/infrastructure/gcs/file.go @@ -0,0 +1,335 @@ +package gcs + +import ( + "context" + "errors" + "fmt" + "io" + "net/url" + "path" + "strings" + + "cloud.google.com/go/storage" + "github.com/kennygrant/sanitize" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" + "google.golang.org/api/iterator" +) + +const ( + gcsAssetBasePath string = "assets" + gcsPluginBasePath string = "plugins" + gcsMapBasePath string = "maps" + fileSizeLimit int64 = 1024 * 1024 * 100 // about 100MB +) + +type fileRepo struct { + bucketName string + base *url.URL + cacheControl string +} + +func NewFile(bucketName, base string, cacheControl string) (gateway.File, error) { + if bucketName == "" { + return nil, errors.New("bucket name is empty") + } + + var u *url.URL + if base == "" { + base = fmt.Sprintf("https://storage.googleapis.com/%s", bucketName) + } + + var err error + u, _ = url.Parse(base) + if err != nil { + return nil, errors.New("invalid base URL") + } + + return &fileRepo{ + bucketName: bucketName, + base: u, + cacheControl: cacheControl, + }, nil +} + +func (f *fileRepo) ReadAsset(ctx context.Context, name string) (io.ReadCloser, error) { + sn := sanitize.Path(name) + if sn == "" { + return nil, rerror.ErrNotFound + } + return f.read(ctx, path.Join(gcsAssetBasePath, sn)) +} + +func (f *fileRepo) UploadAsset(ctx context.Context, file *file.File) (*url.URL, error) { + if file == nil { + return nil, gateway.ErrInvalidFile + } + if file.Size >= fileSizeLimit { + return nil, gateway.ErrFileTooLarge + } + + sn := 
sanitize.Path(newAssetID() + path.Ext(file.Path)) + if sn == "" { + return nil, gateway.ErrInvalidFile + } + + filename := path.Join(gcsAssetBasePath, sn) + u := getGCSObjectURL(f.base, filename) + if u == nil { + return nil, gateway.ErrInvalidFile + } + + if err := f.upload(ctx, filename, file.Content); err != nil { + return nil, err + } + return u, nil +} + +func (f *fileRepo) RemoveAsset(ctx context.Context, u *url.URL) error { + sn := getGCSObjectNameFromURL(f.base, u) + if sn == "" { + return gateway.ErrInvalidFile + } + return f.delete(ctx, sn) +} + +// plugin + +func (f *fileRepo) ReadPluginFile(ctx context.Context, pid id.PluginID, filename string) (io.ReadCloser, error) { + sn := sanitize.Path(filename) + if sn == "" { + return nil, rerror.ErrNotFound + } + return f.read(ctx, path.Join(gcsPluginBasePath, pid.String(), sn)) +} + +func (f *fileRepo) UploadPluginFile(ctx context.Context, pid id.PluginID, file *file.File) error { + sn := sanitize.Path(file.Path) + if sn == "" { + return gateway.ErrInvalidFile + } + return f.upload(ctx, path.Join(gcsPluginBasePath, pid.String(), sanitize.Path(file.Path)), file.Content) +} + +func (f *fileRepo) RemovePlugin(ctx context.Context, pid id.PluginID) error { + return f.deleteAll(ctx, path.Join(gcsPluginBasePath, pid.String())) +} + +// built scene + +func (f *fileRepo) ReadBuiltSceneFile(ctx context.Context, name string) (io.ReadCloser, error) { + if name == "" { + return nil, rerror.ErrNotFound + } + return f.read(ctx, path.Join(gcsMapBasePath, sanitize.Path(name)+".json")) +} + +func (f *fileRepo) UploadBuiltScene(ctx context.Context, content io.Reader, name string) error { + sn := sanitize.Path(name + ".json") + if sn == "" { + return gateway.ErrInvalidFile + } + return f.upload(ctx, path.Join(gcsMapBasePath, sn), content) +} + +func (f *fileRepo) MoveBuiltScene(ctx context.Context, oldName, name string) error { + from := sanitize.Path(oldName + ".json") + dest := sanitize.Path(name + ".json") + if from == "" || 
dest == "" { + return gateway.ErrInvalidFile + } + return f.move(ctx, path.Join(gcsMapBasePath, from), path.Join(gcsMapBasePath, dest)) +} + +func (f *fileRepo) RemoveBuiltScene(ctx context.Context, name string) error { + sn := sanitize.Path(name + ".json") + if sn == "" { + return gateway.ErrInvalidFile + } + return f.delete(ctx, path.Join(gcsMapBasePath, sn)) +} + +// helpers + +func (f *fileRepo) bucket(ctx context.Context) (*storage.BucketHandle, error) { + client, err := storage.NewClient(ctx) + if err != nil { + return nil, err + } + bucket := client.Bucket(f.bucketName) + return bucket, nil +} + +func (f *fileRepo) read(ctx context.Context, filename string) (io.ReadCloser, error) { + if filename == "" { + return nil, rerror.ErrNotFound + } + + bucket, err := f.bucket(ctx) + if err != nil { + log.Errorf("gcs: read bucket err: %+v\n", err) + return nil, rerror.ErrInternalBy(err) + } + + reader, err := bucket.Object(filename).NewReader(ctx) + if err != nil { + if errors.Is(err, storage.ErrObjectNotExist) { + return nil, rerror.ErrNotFound + } + log.Errorf("gcs: read err: %+v\n", err) + return nil, rerror.ErrInternalBy(err) + } + + return reader, nil +} + +func (f *fileRepo) upload(ctx context.Context, filename string, content io.Reader) error { + if filename == "" { + return gateway.ErrInvalidFile + } + + bucket, err := f.bucket(ctx) + if err != nil { + log.Errorf("gcs: upload bucket err: %+v\n", err) + return rerror.ErrInternalBy(err) + } + + object := bucket.Object(filename) + if err := object.Delete(ctx); err != nil && !errors.Is(err, storage.ErrObjectNotExist) { + log.Errorf("gcs: upload delete err: %+v\n", err) + return gateway.ErrFailedToUploadFile + } + + writer := object.NewWriter(ctx) + writer.ObjectAttrs.CacheControl = f.cacheControl + + if _, err := io.Copy(writer, content); err != nil { + log.Errorf("gcs: upload err: %+v\n", err) + return gateway.ErrFailedToUploadFile + } + + if err := writer.Close(); err != nil { + log.Errorf("gcs: upload close 
err: %+v\n", err) + return gateway.ErrFailedToUploadFile + } + + return nil +} + +func (f *fileRepo) move(ctx context.Context, from, dest string) error { + if from == "" || dest == "" || from == dest { + return gateway.ErrInvalidFile + } + + bucket, err := f.bucket(ctx) + if err != nil { + log.Errorf("gcs: move bucket err: %+v\n", err) + return rerror.ErrInternalBy(err) + } + + object := bucket.Object(from) + destObject := bucket.Object(dest) + if _, err := destObject.CopierFrom(object).Run(ctx); err != nil { + if errors.Is(err, storage.ErrObjectNotExist) { + return rerror.ErrNotFound + } + log.Errorf("gcs: move copy err: %+v\n", err) + return rerror.ErrInternalBy(err) + } + + if err := object.Delete(ctx); err != nil { + log.Errorf("gcs: move delete err: %+v\n", err) + return rerror.ErrInternalBy(err) + } + + return nil +} + +func (f *fileRepo) delete(ctx context.Context, filename string) error { + if filename == "" { + return gateway.ErrInvalidFile + } + + bucket, err := f.bucket(ctx) + if err != nil { + log.Errorf("gcs: delete bucket err: %+v\n", err) + return rerror.ErrInternalBy(err) + } + + object := bucket.Object(filename) + if err := object.Delete(ctx); err != nil { + if errors.Is(err, storage.ErrObjectNotExist) { + return nil + } + + log.Errorf("gcs: delete err: %+v\n", err) + return rerror.ErrInternalBy(err) + } + return nil +} + +func (f *fileRepo) deleteAll(ctx context.Context, path string) error { + if path == "" { + return gateway.ErrInvalidFile + } + + bucket, err := f.bucket(ctx) + if err != nil { + log.Errorf("gcs: deleteAll bucket err: %+v\n", err) + return rerror.ErrInternalBy(err) + } + + it := bucket.Objects(ctx, &storage.Query{ + Prefix: path, + }) + + for { + attrs, err := it.Next() + if err == iterator.Done { + break + } + if err != nil { + log.Errorf("gcs: deleteAll next err: %+v\n", err) + return rerror.ErrInternalBy(err) + } + if err := bucket.Object(attrs.Name).Delete(ctx); err != nil { + log.Errorf("gcs: deleteAll err: %+v\n", err) + 
return rerror.ErrInternalBy(err) + } + } + return nil +} + +func getGCSObjectURL(base *url.URL, objectName string) *url.URL { + if base == nil { + return nil + } + + // https://github.com/golang/go/issues/38351 + b := *base + b.Path = path.Join(b.Path, objectName) + return &b +} + +func getGCSObjectNameFromURL(base, u *url.URL) string { + if u == nil { + return "" + } + if base == nil { + base = &url.URL{} + } + p := sanitize.Path(strings.TrimPrefix(u.Path, "/")) + if p == "" || u.Host != base.Host || u.Scheme != base.Scheme || !strings.HasPrefix(p, gcsAssetBasePath+"/") { + return "" + } + + return p +} + +func newAssetID() string { + // TODO: replace + return id.NewAssetID().String() +} diff --git a/server/internal/infrastructure/gcs/file_test.go b/server/internal/infrastructure/gcs/file_test.go new file mode 100644 index 000000000..e2e48e944 --- /dev/null +++ b/server/internal/infrastructure/gcs/file_test.go @@ -0,0 +1,24 @@ +package gcs + +import ( + "net/url" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGetGCSObjectURL(t *testing.T) { + e, _ := url.Parse("https://hoge.com/assets/xxx.yyy") + b, _ := url.Parse("https://hoge.com/assets") + assert.Equal(t, e, getGCSObjectURL(b, "xxx.yyy")) +} + +func TestGetGCSObjectNameFromURL(t *testing.T) { + u, _ := url.Parse("https://hoge.com/assets/xxx.yyy") + b, _ := url.Parse("https://hoge.com") + b2, _ := url.Parse("https://hoge2.com") + assert.Equal(t, "assets/xxx.yyy", getGCSObjectNameFromURL(b, u)) + assert.Equal(t, "", getGCSObjectNameFromURL(b2, u)) + assert.Equal(t, "", getGCSObjectNameFromURL(nil, u)) + assert.Equal(t, "", getGCSObjectNameFromURL(b, nil)) +} diff --git a/server/internal/infrastructure/google/fetch.go b/server/internal/infrastructure/google/fetch.go new file mode 100644 index 000000000..c0309836e --- /dev/null +++ b/server/internal/infrastructure/google/fetch.go @@ -0,0 +1,42 @@ +package google + +import ( + "fmt" + "io" + "net/http" + "net/url" +) + +func sheetURL(fileId 
string, sheetName string) string { + gurl := url.URL{ + Scheme: "https", + Host: "docs.google.com", + Path: fmt.Sprintf("spreadsheets/d/%s/gviz/tq", fileId), + } + + queryValues := gurl.Query() + queryValues.Set("tqx", "out:csv") + queryValues.Set("sheet", sheetName) + gurl.RawQuery = queryValues.Encode() + + return gurl.String() +} + +func fetchCSV(token string, fileId string, sheetName string) (*io.ReadCloser, error) { + u := sheetURL(fileId, sheetName) + req, err := http.NewRequest("GET", u, nil) + if err != nil { + return nil, err + } + + req.Header.Set("Authorization", "Bearer "+token) + res, err := http.DefaultClient.Do(req) + if err != nil { + return nil, err + } + if res.StatusCode != http.StatusOK { + return nil, fmt.Errorf("StatusCode=%d", res.StatusCode) + } + + return &res.Body, nil +} diff --git a/server/internal/infrastructure/google/fetch_test.go b/server/internal/infrastructure/google/fetch_test.go new file mode 100644 index 000000000..21e9b89be --- /dev/null +++ b/server/internal/infrastructure/google/fetch_test.go @@ -0,0 +1,92 @@ +package google + +import ( + "net/http" + "testing" + + "github.com/reearth/reearth-backend/pkg/file" + "github.com/stretchr/testify/assert" + "gopkg.in/h2non/gock.v1" +) + +func Test_fetchCSV(t *testing.T) { + t.Cleanup(func() { + gock.EnableNetworking() + gock.OffAll() + }) + + gock.DisableNetworking() + + type args struct { + token string + fileId string + sheetName string + } + + tests := []struct { + name string + setup func() + args args + want *file.File + wantErr bool + }{ + { + name: "Invalid Token", + setup: func() { + gock.New("https://docs.google.com"). + Get("/spreadsheets/d/(.*)/gviz/tq"). + PathParam("d", "1bXBDUrOgYWdHzScMiLNHRUsmNC9SUV4VFOvpqrx0Yok"). + MatchParams(map[string]string{ + "tqx": "out:csv", + "sheet": "Dataset1", + }). 
+ Reply(http.StatusUnauthorized) + }, + args: args{ + token: "xxxx", + fileId: "1bXBDUrOgYWdHzScMiLNHRUsmNC9SUV4VFOvpqrxxxxx", + sheetName: "Dataset1", + }, + wantErr: true, + }, + { + name: "Working scenario", + setup: func() { + gock.New("https://docs.google.com"). + Get("/spreadsheets/d/(.*)/gviz/tq"). + PathParam("d", "1bXBDUrOgYWdHzScMiLNHRUsmNC9SUV4VFOvpqrxxxxx"). + MatchParams(map[string]string{ + "tqx": "out:csv", + "sheet": "Dataset1", + }). + Reply(http.StatusOK). + BodyString("lat,lng,hieght\n30,35,300\n30.1,35,400") + }, + args: args{ + token: "xxxx", + fileId: "1bXBDUrOgYWdHzScMiLNHRUsmNC9SUV4VFOvpqrxxxxx", + sheetName: "Dataset1", + }, + wantErr: false, + }, + } + + for _, tt := range tests { + tt := tt + + t.Run(tt.name, func(t *testing.T) { + tt.setup() + + got, err := fetchCSV(tt.args.token, tt.args.fileId, tt.args.sheetName) + if (err != nil) != tt.wantErr { + t.Errorf("fetchCSV() error = %v, wantErr %v", err, tt.wantErr) + return + } + if tt.wantErr { + assert.Nil(t, got) + return + } + assert.NotNil(t, got) + }) + } +} diff --git a/server/internal/infrastructure/google/google.go b/server/internal/infrastructure/google/google.go new file mode 100644 index 000000000..7810b2784 --- /dev/null +++ b/server/internal/infrastructure/google/google.go @@ -0,0 +1,18 @@ +package google + +import ( + "io" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" +) + +type google struct { +} + +func NewGoogle() gateway.Google { + return &google{} +} + +func (g google) FetchCSV(token string, fileId string, sheetName string) (*io.ReadCloser, error) { + return fetchCSV(token, fileId, sheetName) +} diff --git a/server/internal/infrastructure/mailer/common.go b/server/internal/infrastructure/mailer/common.go new file mode 100644 index 000000000..82f7a1774 --- /dev/null +++ b/server/internal/infrastructure/mailer/common.go @@ -0,0 +1,112 @@ +package mailer + +import ( + "bytes" + "fmt" + "io" + "mime/multipart" + "net/mail" + "net/textproto" + "strings" + 
+ "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/pkg/log" +) + +func verifyEmails(contacts []gateway.Contact) ([]string, error) { + emails := make([]string, 0, len(contacts)) + for _, c := range contacts { + _, err := mail.ParseAddress(c.Email) + if err != nil { + return nil, fmt.Errorf("invalid email %s", c.Email) + } + emails = append(emails, c.Email) + } + + return emails, nil +} + +type message struct { + to []string + from string + subject string + plainContent string + htmlContent string +} + +func (m *message) encodeContent() (string, error) { + buf := bytes.NewBuffer(nil) + writer := multipart.NewWriter(buf) + boundary := writer.Boundary() + + altBuffer, err := writer.CreatePart(textproto.MIMEHeader{"Content-Type": {"multipart/alternative; boundary=" + boundary}}) + if err != nil { + return "", err + } + altWriter := multipart.NewWriter(altBuffer) + err = altWriter.SetBoundary(boundary) + if err != nil { + return "", err + } + var content io.Writer + content, err = altWriter.CreatePart(textproto.MIMEHeader{"Content-Type": {"text/plain"}}) + if err != nil { + return "", err + } + + _, err = content.Write([]byte(m.plainContent + "\r\n\r\n")) + if err != nil { + return "", err + } + content, err = altWriter.CreatePart(textproto.MIMEHeader{"Content-Type": {"text/html"}}) + if err != nil { + return "", err + } + _, err = content.Write([]byte(m.htmlContent + "\r\n")) + if err != nil { + return "", err + } + _ = altWriter.Close() + return buf.String(), nil +} + +func (m *message) encodeMessage() ([]byte, error) { + buf := bytes.NewBuffer(nil) + buf.WriteString(fmt.Sprintf("Subject: %s\n", m.subject)) + buf.WriteString(fmt.Sprintf("From: %s\n", m.from)) + buf.WriteString(fmt.Sprintf("To: %s\n", strings.Join(m.to, ","))) + content, err := m.encodeContent() + if err != nil { + return nil, err + } + buf.WriteString(content) + + return buf.Bytes(), nil +} + +type ToList []gateway.Contact + +func (l ToList) String() 
string { + tos := &strings.Builder{} + for i, t := range l { + if t.Name != "" { + _, _ = tos.WriteString(t.Name) + if t.Email != "" { + _, _ = tos.WriteString(" ") + } + } + if t.Email != "" { + _, _ = tos.WriteString("<") + _, _ = tos.WriteString(t.Email) + _, _ = tos.WriteString(">") + } + if len(l)-1 > i { + _, _ = tos.WriteString(", ") + } + } + return tos.String() +} + +func logMail(to ToList, subject string) { + log.Infof("mailer: mail sent: To: %s, Subject: %s", to, subject) +} diff --git a/server/internal/infrastructure/mailer/common_test.go b/server/internal/infrastructure/mailer/common_test.go new file mode 100644 index 000000000..a6ecfce53 --- /dev/null +++ b/server/internal/infrastructure/mailer/common_test.go @@ -0,0 +1,101 @@ +package mailer + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_message_encodeContent(t *testing.T) { + // subject and receiver email are not needed for encoding the content + tests := []struct { + name string + plainContent string + htmlContent string + wantContentTypes []string + wantPlain bool + wantHtml bool + wantErr bool + }{ + { + name: "should return encoded message content", + plainContent: "plain content", + htmlContent: `

html content

`, + wantContentTypes: []string{ + "Content-Type: multipart/alternative", + "Content-Type: text/plain", + "Content-Type: text/html", + }, + wantPlain: true, + wantHtml: true, + wantErr: false, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + m := &message{ + plainContent: tc.plainContent, + htmlContent: tc.htmlContent, + } + got, err := m.encodeContent() + gotTypes := true + for _, ct := range tc.wantContentTypes { + gotTypes = strings.Contains(got, ct) && gotTypes + } + assert.Equal(tt, tc.wantErr, err != nil) + assert.True(tt, gotTypes) + assert.Equal(tt, tc.wantPlain, strings.Contains(got, tc.plainContent)) + assert.Equal(tt, tc.wantHtml, strings.Contains(got, tc.htmlContent)) + }) + } +} + +func Test_message_encodeMessage(t *testing.T) { + tests := []struct { + name string + to []string + subject string + plainContent string + htmlContent string + wantTo bool + wantSubject bool + wantPlain bool + wantHtml bool + wantErr bool + }{ + { + name: "should return encoded message", + to: []string{"someone@email.com"}, + subject: "test", + plainContent: "plain content", + htmlContent: `

html content

`, + wantTo: true, + wantSubject: true, + wantPlain: true, + wantHtml: true, + wantErr: false, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + m := &message{ + to: []string{"someone@email.com"}, + subject: "test", + plainContent: tc.plainContent, + htmlContent: tc.htmlContent, + } + got, err := m.encodeMessage() + str := string(got) + assert.Equal(tt, tc.wantErr, err != nil) + assert.Equal(tt, tc.wantSubject, strings.Contains(str, tc.subject)) + assert.Equal(tt, tc.wantTo, strings.Contains(str, tc.to[0])) + assert.Equal(tt, tc.wantPlain, strings.Contains(str, tc.plainContent)) + assert.Equal(tt, tc.wantHtml, strings.Contains(str, tc.htmlContent)) + }) + } +} diff --git a/server/internal/infrastructure/mailer/direct.go b/server/internal/infrastructure/mailer/direct.go new file mode 100644 index 000000000..27cedbda1 --- /dev/null +++ b/server/internal/infrastructure/mailer/direct.go @@ -0,0 +1,125 @@ +package mailer + +import ( + "errors" + "fmt" + "net" + "net/smtp" + "strings" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +// NOTE: implemented but it does not work expectedly +type direct struct { + from string +} + +func NewDirect(from string) gateway.Mailer { + if from == "" { + from = "reearth@localhost:8080" + } + return &direct{from: from} +} + +func (m *direct) SendMail(to []gateway.Contact, subject, plainContent, htmlContent string) error { + emails, err := verifyEmails(to) + if err != nil { + return err + } + + emailHosts, err := m.hosts(emails) + if err != nil { + return err + } + + mxHosts, err := m.lookupHosts(emailHosts) + if err != nil { + return err + } + + msg, err := m.message(emails, subject, plainContent, htmlContent) + if err != nil { + return err + } + + for i, to := range emails { + host := mxHosts[i] + if err := m.send(to, host, msg); err != nil { + return err + } + } + + logMail(to, subject) + return nil +} + 
+func (m *direct) message(emails []string, subject, plainContent, htmlContent string) ([]byte, error) { + msg := &message{ + to: emails, + from: m.from, + subject: subject, + plainContent: plainContent, + htmlContent: htmlContent, + } + encodedMsg, err := msg.encodeMessage() + if err != nil { + return nil, rerror.ErrInternalBy(err) + } + return encodedMsg, nil +} + +func (*direct) hosts(addresses []string) ([]string, error) { + res := make([]string, 0, len(addresses)) + for _, a := range addresses { + s := strings.SplitAfterN(a, "@", 2) + if len(s) != 2 { + return nil, errors.New("invalid email address") + } + res = append(res, s[1]) + } + return res, nil +} + +func (*direct) lookupHosts(hosts []string) ([]string, error) { + res := make([]string, 0, len(hosts)) + for _, h := range hosts { + mxs, err := net.LookupMX(h) + if err != nil { + return nil, errors.New("invalid email address") + } + if len(mxs) == 0 { + return nil, errors.New("invalid email address") + } + res = append(res, strings.TrimSuffix(mxs[0].Host, ".")) + } + return res, nil +} + +func (m *direct) send(to string, host string, msg []byte) error { + c, err := smtp.Dial(fmt.Sprintf("%s:25", host)) + if err != nil { + return rerror.ErrInternalBy(err) + } + if err := c.Mail(m.from); err != nil { + return rerror.ErrInternalBy(err) + } + if err := c.Rcpt(to); err != nil { + return rerror.ErrInternalBy(err) + } + wc, err := c.Data() + if err != nil { + return rerror.ErrInternalBy(err) + } + if _, err = wc.Write(msg); err != nil { + return rerror.ErrInternalBy(err) + } + if err := wc.Close(); err != nil { + return rerror.ErrInternalBy(err) + } + if err := c.Quit(); err != nil { + return rerror.ErrInternalBy(err) + } + return nil +} diff --git a/server/internal/infrastructure/mailer/logger.go b/server/internal/infrastructure/mailer/logger.go new file mode 100644 index 000000000..1d52fba21 --- /dev/null +++ b/server/internal/infrastructure/mailer/logger.go @@ -0,0 +1,21 @@ +package mailer + +import ( + "fmt" + 
+ "github.com/reearth/reearth-backend/internal/usecase/gateway" +) + +const loggerSep = "=======================" + +type logger struct{} + +func NewLogger() gateway.Mailer { + return &logger{} +} + +func (m *logger) SendMail(to []gateway.Contact, subject, plainContent, _ string) error { + logMail(to, subject) + fmt.Printf("%s\n%s\n%s\n", loggerSep, plainContent, loggerSep) + return nil +} diff --git a/server/internal/infrastructure/mailer/mock.go b/server/internal/infrastructure/mailer/mock.go new file mode 100644 index 000000000..b3d04f9bc --- /dev/null +++ b/server/internal/infrastructure/mailer/mock.go @@ -0,0 +1,39 @@ +package mailer + +import ( + "sync" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" +) + +type Mock struct { + lock sync.Mutex + mails []Mail +} + +type Mail struct { + To []gateway.Contact + Subject string + PlainContent string + HTMLContent string +} + +func NewMock() *Mock { + return &Mock{} +} + +func (m *Mock) SendMail(to []gateway.Contact, subject, text, html string) error { + m.lock.Lock() + defer m.lock.Unlock() + m.mails = append(m.mails, Mail{ + To: append([]gateway.Contact{}, to...), + Subject: subject, + PlainContent: text, + HTMLContent: html, + }) + return nil +} + +func (m *Mock) Mails() []Mail { + return append([]Mail{}, m.mails...) 
+} diff --git a/server/internal/infrastructure/mailer/sendgrid.go b/server/internal/infrastructure/mailer/sendgrid.go new file mode 100644 index 000000000..07ec200ed --- /dev/null +++ b/server/internal/infrastructure/mailer/sendgrid.go @@ -0,0 +1,36 @@ +package mailer + +import ( + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/sendgrid/sendgrid-go" + "github.com/sendgrid/sendgrid-go/helpers/mail" +) + +type sendgridMailer struct { + name string + email string + client *sendgrid.Client +} + +func NewSendGrid(senderName, senderEmail, api string) gateway.Mailer { + return &sendgridMailer{ + name: senderName, + email: senderEmail, + client: sendgrid.NewSendClient(api), + } +} + +func (m *sendgridMailer) SendMail(to []gateway.Contact, subject, plainContent, htmlContent string) error { + for _, t := range to { + sender := mail.NewEmail(m.name, m.email) + receiver := mail.NewEmail(t.Name, t.Email) + message := mail.NewSingleEmail(sender, subject, receiver, plainContent, htmlContent) + _, err := m.client.Send(message) + if err != nil { + return err + } + } + + logMail(to, subject) + return nil +} diff --git a/server/internal/infrastructure/mailer/smtp.go b/server/internal/infrastructure/mailer/smtp.go new file mode 100644 index 000000000..be72c3675 --- /dev/null +++ b/server/internal/infrastructure/mailer/smtp.go @@ -0,0 +1,58 @@ +package mailer + +import ( + "errors" + "net/smtp" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" +) + +type smtpMailer struct { + host string + port string + email string + username string + password string +} + +func NewSMTP(host, port, username, email, password string) gateway.Mailer { + return &smtpMailer{ + host: host, + port: port, + username: username, + email: email, + password: password, + } +} + +func (m *smtpMailer) SendMail(to []gateway.Contact, subject, plainContent, htmlContent string) error { + emails, err := verifyEmails(to) + if err != nil { + return err + } + + msg := &message{ + 
to: emails, + from: m.email, + subject: subject, + plainContent: plainContent, + htmlContent: htmlContent, + } + + encodedMsg, err := msg.encodeMessage() + if err != nil { + return err + } + + auth := smtp.PlainAuth("", m.username, m.password, m.host) + if len(m.host) == 0 { + return errors.New("invalid smtp url") + } + + if err := smtp.SendMail(m.host+":"+m.port, auth, m.email, emails, encodedMsg); err != nil { + return err + } + + logMail(to, subject) + return nil +} diff --git a/server/internal/infrastructure/marketplace/marketplace.go b/server/internal/infrastructure/marketplace/marketplace.go new file mode 100644 index 000000000..0dac3946d --- /dev/null +++ b/server/internal/infrastructure/marketplace/marketplace.go @@ -0,0 +1,112 @@ +package marketplace + +import ( + "context" + "fmt" + "net/http" + "strings" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin/pluginpack" + "github.com/reearth/reearth-backend/pkg/rerror" + "golang.org/x/oauth2/clientcredentials" +) + +var pluginPackageSizeLimit int64 = 10 * 1024 * 1024 // 10MB + +type Marketplace struct { + endpoint string + conf clientcredentials.Config + client *http.Client +} + +func New(endpoint string, conf clientcredentials.Config) *Marketplace { + return &Marketplace{ + endpoint: strings.TrimSuffix(endpoint, "/"), + client: conf.Client(context.Background()), + } +} + +func (m *Marketplace) FetchPluginPackage(ctx context.Context, pid id.PluginID) (*pluginpack.Package, error) { + purl, err := m.getPluginURL(ctx, pid) + if err != nil { + return nil, err + } + return m.downloadPluginPackage(ctx, purl) +} + +func (m *Marketplace) getPluginURL(_ context.Context, pid id.PluginID) (string, error) { + return fmt.Sprintf("%s/api/plugins/%s/%s.zip", m.endpoint, pid.Name(), pid.Version().String()), nil +} + +/* +func (m *Marketplace) getPluginURL(ctx context.Context, pid id.PluginID) (string, error) { + body := strings.NewReader(fmt.Sprintf( + `{"query":"query { 
node(id:"%s" type:PLUGIN) { ...Plugin { url } } }"}`, + pid.Name(), + )) + req, err := http.NewRequestWithContext(ctx, "POST", m.endpoint+"/graphql", body) + if err != nil { + return "", rerror.ErrInternalBy(err) + } + req.Header.Set("Content-Type", "application/json") + + res, err := m.client.Do(req) + if err != nil { + return "", rerror.ErrInternalBy(err) + } + if res.StatusCode != http.StatusOK { + return "", rerror.ErrNotFound + } + defer func() { + _ = res.Body.Close() + }() + var pluginRes response + if err := json.NewDecoder(res.Body).Decode(&pluginRes); err != nil { + return "", rerror.ErrInternalBy(err) + } + if pluginRes.Errors != nil { + return "", rerror.ErrInternalBy(fmt.Errorf("gql returns errors: %v", pluginRes.Errors)) + } + + purl := pluginRes.PluginURL() + if purl == "" { + return "", rerror.ErrNotFound + } + return purl, nil +} + +type response struct { + Data pluginNodeQueryData `json:"data"` + Errors any `json:"errors"` +} + +func (r response) PluginURL() string { + return r.Data.Node.URL +} + +type pluginNodeQueryData struct { + Node plugin +} + +type plugin struct { + URL string `json:"url"` +} +*/ + +func (m *Marketplace) downloadPluginPackage(ctx context.Context, url string) (*pluginpack.Package, error) { + res, err := m.client.Get(url) + if err != nil { + return nil, rerror.ErrInternalBy(err) + } + defer func() { + _ = res.Body.Close() + }() + if res.StatusCode == http.StatusNotFound { + return nil, rerror.ErrNotFound + } + if res.StatusCode != http.StatusOK { + return nil, rerror.ErrInternalBy(fmt.Errorf("status code is %s", res.Status)) + } + return pluginpack.PackageFromZip(res.Body, nil, pluginPackageSizeLimit) +} diff --git a/server/internal/infrastructure/marketplace/marketplace_test.go b/server/internal/infrastructure/marketplace/marketplace_test.go new file mode 100644 index 000000000..1876c9b4b --- /dev/null +++ b/server/internal/infrastructure/marketplace/marketplace_test.go @@ -0,0 +1,108 @@ +package marketplace + +import ( + 
"context" + "io" + "net/http" + "net/url" + "os" + "testing" + + "github.com/jarcoal/httpmock" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" + "golang.org/x/oauth2/clientcredentials" +) + +func TestMarketplace_FetchPluginPackage(t *testing.T) { + ac := "xxxxx" + pid := id.MustPluginID("testplugin~1.0.1") + + f, err := os.Open("testdata/test.zip") + assert.NoError(t, err) + defer func() { + _ = f.Close() + }() + z, err := io.ReadAll(f) + assert.NoError(t, err) + + httpmock.Activate() + defer httpmock.Deactivate() + + httpmock.RegisterResponder( + "POST", "https://marketplace.example.com/oauth/token", + func(req *http.Request) (*http.Response, error) { + _ = req.ParseForm() + if req.Form.Get("grant_type") != "client_credentials" { + return httpmock.NewStringResponse(http.StatusUnauthorized, ""), nil + } + if req.Form.Get("audience") != "d" { + return httpmock.NewStringResponse(http.StatusUnauthorized, ""), nil + } + if req.Form.Get("client_id") != "x" { + return httpmock.NewStringResponse(http.StatusUnauthorized, ""), nil + } + if req.Form.Get("client_secret") != "y" { + return httpmock.NewStringResponse(http.StatusUnauthorized, ""), nil + } + + resp, err := httpmock.NewJsonResponse(200, map[string]any{ + "access_token": ac, + "token_type": "Bearer", + "expires_in": 86400, + }) + if err != nil { + return httpmock.NewStringResponse(http.StatusInternalServerError, ""), nil + } + return resp, nil + }, + ) + + /* + httpmock.RegisterResponder( + "POST", "https://marketplace.example.com/graphql", + func(req *http.Request) (*http.Response, error) { + if req.Header.Get("Authorization") != "Bearer "+ac { + return httpmock.NewStringResponse(http.StatusUnauthorized, ""), nil + } + if req.Header.Get("Content-Type") != "application/json" { + return httpmock.NewStringResponse(http.StatusBadRequest, ""), nil + } + resp, err := httpmock.NewJsonResponse(200, map[string]any{ + "data": map[string]any{ + "node": map[string]string{ + "url": 
"https://marketplace.example.com/aaa.zip", + }, + }, + }) + if err != nil { + return httpmock.NewStringResponse(http.StatusInternalServerError, ""), nil + } + return resp, nil + }, + ) + */ + + httpmock.RegisterResponder( + "GET", "https://marketplace.example.com/api/plugins/testplugin/1.0.1.zip", + func(req *http.Request) (*http.Response, error) { + if req.Header.Get("Authorization") != "Bearer "+ac { + return httpmock.NewStringResponse(http.StatusUnauthorized, ""), nil + } + return httpmock.NewBytesResponse(http.StatusOK, z), nil + }, + ) + + m := New("https://marketplace.example.com/", clientcredentials.Config{ + ClientID: "x", + ClientSecret: "y", + TokenURL: "https://marketplace.example.com/oauth/token", + EndpointParams: url.Values{ + "audience": []string{"d"}, + }, + }) + got, err := m.FetchPluginPackage(context.Background(), pid) + assert.NoError(t, err) + // no need to test pluginpack in detail here + assert.Equal(t, id.MustPluginID("testplugin~1.0.1"), got.Manifest.Plugin.ID()) +} diff --git a/server/internal/infrastructure/marketplace/testdata/test.zip b/server/internal/infrastructure/marketplace/testdata/test.zip new file mode 100644 index 000000000..0d371acbe Binary files /dev/null and b/server/internal/infrastructure/marketplace/testdata/test.zip differ diff --git a/server/internal/infrastructure/memory/asset.go b/server/internal/infrastructure/memory/asset.go new file mode 100644 index 000000000..73a463f98 --- /dev/null +++ b/server/internal/infrastructure/memory/asset.go @@ -0,0 +1,133 @@ +package memory + +import ( + "context" + "sort" + "strings" + "sync" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/asset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type Asset struct { + lock sync.Mutex + data map[id.AssetID]*asset.Asset + f repo.TeamFilter +} + +func NewAsset() repo.Asset { + 
return &Asset{ + data: map[id.AssetID]*asset.Asset{}, + } +} + +func (r *Asset) Filtered(f repo.TeamFilter) repo.Asset { + return &Asset{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: r.f.Merge(f), + } +} + +func (r *Asset) FindByID(ctx context.Context, id id.AssetID) (*asset.Asset, error) { + r.lock.Lock() + defer r.lock.Unlock() + + d, ok := r.data[id] + if ok && r.f.CanRead(d.Team()) { + return d, nil + } + return &asset.Asset{}, rerror.ErrNotFound +} + +func (r *Asset) FindByIDs(ctx context.Context, ids id.AssetIDList) ([]*asset.Asset, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := []*asset.Asset{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + if r.f.CanRead(d.Team()) { + result = append(result, d) + continue + } + } + result = append(result, nil) + } + return result, nil +} + +func (r *Asset) FindByTeam(ctx context.Context, id id.TeamID, filter repo.AssetFilter) ([]*asset.Asset, *usecase.PageInfo, error) { + if !r.f.CanRead(id) { + return nil, usecase.EmptyPageInfo(), nil + } + + r.lock.Lock() + defer r.lock.Unlock() + + result := []*asset.Asset{} + for _, d := range r.data { + if d.Team() == id && (filter.Keyword == nil || strings.Contains(d.Name(), *filter.Keyword)) { + result = append(result, d) + } + } + + if filter.Sort != nil { + s := *filter.Sort + sort.SliceStable(result, func(i, j int) bool { + if s == asset.SortTypeID { + return result[i].ID().Compare(result[j].ID()) < 0 + } + if s == asset.SortTypeSize { + return result[i].Size() < result[j].Size() + } + if s == asset.SortTypeName { + return strings.Compare(result[i].Name(), result[j].Name()) < 0 + } + return false + }) + } + + var startCursor, endCursor *usecase.Cursor + if len(result) > 0 { + _startCursor := usecase.Cursor(result[0].ID().String()) + _endCursor := usecase.Cursor(result[len(result)-1].ID().String()) + startCursor = &_startCursor + endCursor = &_endCursor + } + + return result, usecase.NewPageInfo( + 
len(r.data), + startCursor, + endCursor, + true, + true, + ), nil +} + +func (r *Asset) Save(ctx context.Context, a *asset.Asset) error { + if !r.f.CanWrite(a.Team()) { + return repo.ErrOperationDenied + } + + r.lock.Lock() + defer r.lock.Unlock() + + r.data[a.ID()] = a + return nil +} + +func (r *Asset) Remove(ctx context.Context, id id.AssetID) error { + r.lock.Lock() + defer r.lock.Unlock() + + if a, ok := r.data[id]; ok && r.f.CanWrite(a.Team()) { + delete(r.data, id) + } + + return nil +} diff --git a/server/internal/infrastructure/memory/auth_request.go b/server/internal/infrastructure/memory/auth_request.go new file mode 100644 index 000000000..02daad83d --- /dev/null +++ b/server/internal/infrastructure/memory/auth_request.go @@ -0,0 +1,75 @@ +package memory + +import ( + "context" + "sync" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/auth" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type AuthRequest struct { + lock sync.Mutex + data map[id.AuthRequestID]*auth.Request +} + +func NewAuthRequest() repo.AuthRequest { + return &AuthRequest{ + data: map[id.AuthRequestID]*auth.Request{}, + } +} + +func (r *AuthRequest) FindByID(_ context.Context, id id.AuthRequestID) (*auth.Request, error) { + r.lock.Lock() + defer r.lock.Unlock() + + d, ok := r.data[id] + if ok { + return d, nil + } + return nil, rerror.ErrNotFound +} + +func (r *AuthRequest) FindByCode(_ context.Context, s string) (*auth.Request, error) { + r.lock.Lock() + defer r.lock.Unlock() + + for _, ar := range r.data { + if ar.GetCode() == s { + return ar, nil + } + } + + return nil, rerror.ErrNotFound +} + +func (r *AuthRequest) FindBySubject(_ context.Context, s string) (*auth.Request, error) { + r.lock.Lock() + defer r.lock.Unlock() + + for _, ar := range r.data { + if ar.GetSubject() == s { + return ar, nil + } + } + + return nil, rerror.ErrNotFound +} + +func (r *AuthRequest) Save(_ 
context.Context, request *auth.Request) error { + r.lock.Lock() + defer r.lock.Unlock() + + r.data[request.ID()] = request + return nil +} + +func (r *AuthRequest) Remove(_ context.Context, requestID id.AuthRequestID) error { + r.lock.Lock() + defer r.lock.Unlock() + + delete(r.data, requestID) + return nil +} diff --git a/server/internal/infrastructure/memory/config.go b/server/internal/infrastructure/memory/config.go new file mode 100644 index 000000000..07d18d1e3 --- /dev/null +++ b/server/internal/infrastructure/memory/config.go @@ -0,0 +1,46 @@ +package memory + +import ( + "context" + "sync" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/config" +) + +type Config struct { + lock sync.Mutex + locked bool + data *config.Config +} + +func NewConfig() repo.Config { + return &Config{} +} + +func (r *Config) LockAndLoad(ctx context.Context) (*config.Config, error) { + r.lock.Lock() + r.locked = true + return r.data, nil +} + +func (r *Config) Save(ctx context.Context, c *config.Config) error { + if c != nil { + r.data = c + } + return nil +} + +func (r *Config) SaveAndUnlock(ctx context.Context, c *config.Config) error { + _ = r.Save(ctx, c) + return r.Unlock(ctx) +} + +func (r *Config) Unlock(_ context.Context) error { + if !r.locked { + return nil + } + r.lock.Unlock() + r.locked = false + return nil +} diff --git a/server/internal/infrastructure/memory/config_test.go b/server/internal/infrastructure/memory/config_test.go new file mode 100644 index 000000000..bba230ae4 --- /dev/null +++ b/server/internal/infrastructure/memory/config_test.go @@ -0,0 +1,14 @@ +package memory + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestConfig(t *testing.T) { + ctx := context.Background() + c := NewConfig() + assert.NoError(t, c.Unlock(ctx)) +} diff --git a/server/internal/infrastructure/memory/container.go b/server/internal/infrastructure/memory/container.go new file mode 100644 
index 000000000..c8ec4fe41 --- /dev/null +++ b/server/internal/infrastructure/memory/container.go @@ -0,0 +1,26 @@ +package memory + +import ( + "github.com/reearth/reearth-backend/internal/usecase/repo" +) + +func New() *repo.Container { + c := &repo.Container{} + c.Asset = NewAsset() + c.Config = NewConfig() + c.DatasetSchema = NewDatasetSchema() + c.Dataset = NewDataset() + c.Layer = NewLayer() + c.Plugin = NewPlugin() + c.Project = NewProject() + c.PropertySchema = NewPropertySchema() + c.Property = NewProperty() + c.Scene = NewScene() + c.Tag = NewTag() + c.Team = NewTeam() + c.User = NewUser() + c.SceneLock = NewSceneLock() + c.Transaction = NewTransaction() + c.Lock = NewLock() + return c +} diff --git a/server/internal/infrastructure/memory/dataset.go b/server/internal/infrastructure/memory/dataset.go new file mode 100644 index 000000000..af3525b64 --- /dev/null +++ b/server/internal/infrastructure/memory/dataset.go @@ -0,0 +1,202 @@ +package memory + +import ( + "context" + "sync" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type Dataset struct { + lock sync.Mutex + data map[id.DatasetID]*dataset.Dataset + f repo.SceneFilter +} + +func NewDataset() repo.Dataset { + return &Dataset{ + data: map[id.DatasetID]*dataset.Dataset{}, + } +} + +func (r *Dataset) Filtered(f repo.SceneFilter) repo.Dataset { + return &Dataset{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: r.f.Merge(f), + } +} + +func (r *Dataset) FindByID(ctx context.Context, id id.DatasetID) (*dataset.Dataset, error) { + r.lock.Lock() + defer r.lock.Unlock() + + p, ok := r.data[id] + if ok && r.f.CanRead(p.Scene()) { + return p, nil + } + return nil, rerror.ErrNotFound +} + +func (r *Dataset) FindByIDs(ctx context.Context, ids 
id.DatasetIDList) (dataset.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := dataset.List{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + if r.f.CanRead(d.Scene()) { + result = append(result, d) + continue + } + } + result = append(result, nil) + } + return result, nil +} + +func (r *Dataset) FindBySchema(ctx context.Context, id id.DatasetSchemaID, p *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := dataset.List{} + for _, d := range r.data { + if d.Schema() == id && r.f.CanRead(d.Scene()) { + result = append(result, d) + } + } + + var startCursor, endCursor *usecase.Cursor + if len(result) > 0 { + _startCursor := usecase.Cursor(result[0].ID().String()) + _endCursor := usecase.Cursor(result[len(result)-1].ID().String()) + startCursor = &_startCursor + endCursor = &_endCursor + } + + return result, usecase.NewPageInfo( + len(r.data), + startCursor, + endCursor, + true, + true, + ), nil +} + +func (r *Dataset) CountBySchema(ctx context.Context, id id.DatasetSchemaID) (int, error) { + r.lock.Lock() + defer r.lock.Unlock() + + n := 0 + for _, dataset := range r.data { + if dataset.Schema() == id { + if r.f.CanRead(dataset.Scene()) { + n++ + } + } + } + return n, nil +} + +func (r *Dataset) FindBySchemaAll(ctx context.Context, id id.DatasetSchemaID) (dataset.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := dataset.List{} + for _, d := range r.data { + if d.Schema() == id && r.f.CanRead(d.Scene()) { + result = append(result, d) + } + } + return result, nil +} + +func (r *Dataset) FindGraph(ctx context.Context, i id.DatasetID, fields id.DatasetFieldIDList) (dataset.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := make(dataset.List, 0, len(fields)) + next := i + for _, nextField := range fields { + if d := r.data[next]; d != nil && r.f.CanRead(d.Scene()) { + result = append(result, d) + if f := d.Field(nextField); f != nil { + if 
f.Type() == dataset.ValueTypeRef { + if l := f.Value().ValueRef(); l != nil { + if did, err := id.DatasetIDFrom(*l); err == nil { + next = did + continue + } + } + } + } + } + } + return result, nil +} + +func (r *Dataset) Save(ctx context.Context, d *dataset.Dataset) error { + if !r.f.CanWrite(d.Scene()) { + return repo.ErrOperationDenied + } + + r.lock.Lock() + defer r.lock.Unlock() + + r.data[d.ID()] = d + return nil +} + +func (r *Dataset) SaveAll(ctx context.Context, dl dataset.List) error { + r.lock.Lock() + defer r.lock.Unlock() + + for _, d := range dl { + if r.f.CanWrite(d.Scene()) { + r.data[d.ID()] = d + } + } + return nil +} + +func (r *Dataset) Remove(ctx context.Context, id id.DatasetID) error { + r.lock.Lock() + defer r.lock.Unlock() + + if d, ok := r.data[id]; ok && r.f.CanWrite(d.Scene()) { + delete(r.data, id) + } + return nil +} + +func (r *Dataset) RemoveAll(ctx context.Context, ids id.DatasetIDList) error { + r.lock.Lock() + defer r.lock.Unlock() + + for _, id := range ids { + if d, ok := r.data[id]; ok && r.f.CanWrite(d.Scene()) { + delete(r.data, id) + } + } + return nil +} + +func (r *Dataset) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + if !r.f.CanWrite(sceneID) { + return nil + } + + r.lock.Lock() + defer r.lock.Unlock() + + for did, d := range r.data { + if d.Scene() == sceneID { + delete(r.data, did) + } + } + return nil +} diff --git a/server/internal/infrastructure/memory/dataset_schema.go b/server/internal/infrastructure/memory/dataset_schema.go new file mode 100644 index 000000000..faa2a77aa --- /dev/null +++ b/server/internal/infrastructure/memory/dataset_schema.go @@ -0,0 +1,219 @@ +package memory + +import ( + "context" + "sync" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type 
DatasetSchema struct { + lock sync.Mutex + data map[id.DatasetSchemaID]*dataset.Schema + f repo.SceneFilter +} + +func NewDatasetSchema() repo.DatasetSchema { + return &DatasetSchema{ + data: map[id.DatasetSchemaID]*dataset.Schema{}, + } +} + +func (r *DatasetSchema) Filtered(f repo.SceneFilter) repo.DatasetSchema { + return &DatasetSchema{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: r.f.Merge(f), + } +} + +func (r *DatasetSchema) FindByID(ctx context.Context, id id.DatasetSchemaID) (*dataset.Schema, error) { + r.lock.Lock() + defer r.lock.Unlock() + + p, ok := r.data[id] + if ok && r.f.CanRead(p.Scene()) { + return p, nil + } + return nil, rerror.ErrNotFound +} + +func (r *DatasetSchema) FindByIDs(ctx context.Context, ids id.DatasetSchemaIDList) (dataset.SchemaList, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := dataset.SchemaList{} + for _, id := range ids { + if d, ok := r.data[id]; ok && r.f.CanRead(d.Scene()) { + d2 := d + result = append(result, d2) + } else { + result = append(result, nil) + } + } + return result, nil +} + +func (r *DatasetSchema) FindByScene(ctx context.Context, s id.SceneID, p *usecase.Pagination) (dataset.SchemaList, *usecase.PageInfo, error) { + if !r.f.CanRead(s) { + return nil, usecase.EmptyPageInfo(), nil + } + + r.lock.Lock() + defer r.lock.Unlock() + + result := dataset.SchemaList{} + for _, d := range r.data { + if d.Scene() == s { + d2 := d + result = append(result, d2) + } + } + + var startCursor, endCursor *usecase.Cursor + if len(result) > 0 { + _startCursor := usecase.Cursor(result[0].ID().String()) + _endCursor := usecase.Cursor(result[len(result)-1].ID().String()) + startCursor = &_startCursor + endCursor = &_endCursor + } + + return result, usecase.NewPageInfo( + len(r.data), + startCursor, + endCursor, + true, + true, + ), nil +} + +func (r *DatasetSchema) FindBySceneAll(ctx context.Context, s id.SceneID) (dataset.SchemaList, error) { + if !r.f.CanRead(s) 
{ + return nil, nil + } + + r.lock.Lock() + defer r.lock.Unlock() + + result := dataset.SchemaList{} + for _, d := range r.data { + if d.Scene() == s && r.f.CanRead(d.Scene()) { + d2 := d + result = append(result, d2) + } + } + return result, nil +} + +func (r *DatasetSchema) FindAllDynamicByScene(ctx context.Context, s id.SceneID) (dataset.SchemaList, error) { + if !r.f.CanRead(s) { + return nil, nil + } + + r.lock.Lock() + defer r.lock.Unlock() + + result := dataset.SchemaList{} + for _, d := range r.data { + if d.Scene() == s && d.Dynamic() && r.f.CanRead(d.Scene()) { + d2 := d + result = append(result, d2) + } + } + return result, nil +} + +func (r *DatasetSchema) FindDynamicByID(ctx context.Context, id id.DatasetSchemaID) (*dataset.Schema, error) { + r.lock.Lock() + defer r.lock.Unlock() + + p, ok := r.data[id] + if ok && p.Dynamic() && r.f.CanRead(p.Scene()) { + return p, nil + } + return nil, rerror.ErrNotFound +} + +func (r *DatasetSchema) FindBySceneAndSource(ctx context.Context, s id.SceneID, src string) (dataset.SchemaList, error) { + if !r.f.CanRead(s) { + return nil, rerror.ErrNotFound + } + + r.lock.Lock() + defer r.lock.Unlock() + + result := dataset.SchemaList{} + for _, d := range r.data { + if d.Scene() == s && d.Source() == src { + d2 := d + result = append(result, d2) + } + } + return result, nil +} + +func (r *DatasetSchema) Save(ctx context.Context, d *dataset.Schema) error { + if !r.f.CanWrite(d.Scene()) { + return repo.ErrOperationDenied + } + + r.lock.Lock() + defer r.lock.Unlock() + + r.data[d.ID()] = d + return nil +} + +func (r *DatasetSchema) SaveAll(ctx context.Context, dl dataset.SchemaList) error { + r.lock.Lock() + defer r.lock.Unlock() + + for _, d := range dl { + if r.f.CanWrite(d.Scene()) { + r.data[d.ID()] = d + } + } + return nil +} + +func (r *DatasetSchema) Remove(ctx context.Context, id id.DatasetSchemaID) error { + r.lock.Lock() + defer r.lock.Unlock() + + if d, ok := r.data[id]; ok && r.f.CanWrite(d.Scene()) { + 
delete(r.data, id) + } + return nil +} + +func (r *DatasetSchema) RemoveAll(ctx context.Context, ids id.DatasetSchemaIDList) error { + r.lock.Lock() + defer r.lock.Unlock() + + for _, id := range ids { + if d, ok := r.data[id]; ok && r.f.CanWrite(d.Scene()) { + delete(r.data, id) + } + } + return nil +} + +func (r *DatasetSchema) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + if !r.f.CanWrite(sceneID) { + return nil + } + + r.lock.Lock() + defer r.lock.Unlock() + + for did, d := range r.data { + if d.Scene() == sceneID { + delete(r.data, did) + } + } + return nil +} diff --git a/server/internal/infrastructure/memory/layer.go b/server/internal/infrastructure/memory/layer.go new file mode 100644 index 000000000..00ada3ad6 --- /dev/null +++ b/server/internal/infrastructure/memory/layer.go @@ -0,0 +1,360 @@ +package memory + +import ( + "context" + "sync" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type Layer struct { + lock sync.Mutex + data map[id.LayerID]layer.Layer + f repo.SceneFilter +} + +func NewLayer() repo.Layer { + return &Layer{ + data: map[id.LayerID]layer.Layer{}, + } +} + +func NewLayerWith(items ...layer.Layer) repo.Layer { + r := NewLayer() + for _, i := range items { + _ = r.Save(nil, i) + } + return r +} + +func (r *Layer) Filtered(f repo.SceneFilter) repo.Layer { + return &Layer{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: r.f.Merge(f), + } +} + +func (r *Layer) FindByID(ctx context.Context, id id.LayerID) (layer.Layer, error) { + r.lock.Lock() + defer r.lock.Unlock() + + res, ok := r.data[id] + if ok && r.f.CanRead(res.Scene()) { + return res, nil + } + return nil, rerror.ErrNotFound +} + +func (r *Layer) FindByIDs(ctx context.Context, ids id.LayerIDList) (layer.List, error) { + r.lock.Lock() + defer 
r.lock.Unlock() + + result := layer.List{} + for _, id := range ids { + if d, ok := r.data[id]; ok && r.f.CanRead(d.Scene()) { + result = append(result, &d) + continue + } + result = append(result, nil) + } + return result, nil +} + +func (r *Layer) FindGroupByIDs(ctx context.Context, ids id.LayerIDList) (layer.GroupList, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := layer.GroupList{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + if lg := layer.GroupFromLayer(d); lg != nil && r.f.CanRead(lg.Scene()) { + result = append(result, lg) + continue + } + result = append(result, nil) + } + } + return result, nil +} + +func (r *Layer) FindItemByIDs(ctx context.Context, ids id.LayerIDList) (layer.ItemList, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := layer.ItemList{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + if li := layer.ItemFromLayer(d); li != nil && r.f.CanRead(li.Scene()) { + result = append(result, li) + continue + } + result = append(result, nil) + } + } + return result, nil +} + +func (r *Layer) FindItemByID(ctx context.Context, id id.LayerID) (*layer.Item, error) { + r.lock.Lock() + defer r.lock.Unlock() + + d, ok := r.data[id] + if !ok { + return &layer.Item{}, nil + } + if li := layer.ItemFromLayer(d); li != nil && r.f.CanRead(li.Scene()) { + return li, nil + } + return nil, rerror.ErrNotFound +} + +func (r *Layer) FindGroupByID(ctx context.Context, id id.LayerID) (*layer.Group, error) { + r.lock.Lock() + defer r.lock.Unlock() + + d, ok := r.data[id] + if !ok { + return &layer.Group{}, nil + } + if lg := layer.GroupFromLayer(d); lg != nil && r.f.CanRead(lg.Scene()) { + return lg, nil + } + return nil, rerror.ErrNotFound +} + +func (r *Layer) FindGroupBySceneAndLinkedDatasetSchema(ctx context.Context, s id.SceneID, ds id.DatasetSchemaID) (layer.GroupList, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := layer.GroupList{} + for _, l := range r.data { + if l.Scene() != s { + continue + } 
+ if lg := layer.ToLayerGroup(l); lg != nil && r.f.CanRead(lg.Scene()) { + if dsid := lg.LinkedDatasetSchema(); dsid != nil && *dsid == ds { + result = append(result, lg) + } + } + } + return result, nil +} + +func (r *Layer) FindParentsByIDs(_ context.Context, ids id.LayerIDList) (layer.GroupList, error) { + r.lock.Lock() + defer r.lock.Unlock() + + res := layer.GroupList{} + for _, l := range r.data { + if lg := layer.ToLayerGroup(l); lg != nil && r.f.CanRead(l.Scene()) { + for _, cl := range lg.Layers().Layers() { + if ids.Has(cl) { + res = append(res, lg) + } + } + } + } + + return res, nil +} + +func (r *Layer) FindByPluginAndExtension(_ context.Context, pid id.PluginID, eid *id.PluginExtensionID) (layer.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + + res := layer.List{} + for _, l := range r.data { + l := l + if r.f.CanRead(l.Scene()) && l.Plugin() != nil && l.Plugin().Equal(pid) { + e := l.Extension() + if eid == nil || e != nil && *e == *eid { + res = append(res, &l) + } + } + } + + return res, nil +} + +func (r *Layer) FindByPluginAndExtensionOfBlocks(_ context.Context, pid id.PluginID, eid *id.PluginExtensionID) (layer.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + + res := layer.List{} + for _, l := range r.data { + l := l + if !r.f.CanRead(l.Scene()) || len(l.Infobox().FieldsByPlugin(pid, eid)) == 0 { + continue + } + res = append(res, &l) + } + + return res, nil +} + +func (r *Layer) FindByProperty(ctx context.Context, id id.PropertyID) (layer.Layer, error) { + r.lock.Lock() + defer r.lock.Unlock() + + for _, l := range r.data { + if !r.f.CanRead(l.Scene()) { + continue + } + if pid := l.Property(); pid != nil && *pid == id { + return l, nil + } + if pid := l.Infobox().PropertyRef(); pid != nil && *pid == id { + return l, nil + } + for _, f := range l.Infobox().Fields() { + if f.Property() == id { + return l, nil + } + } + } + return nil, rerror.ErrNotFound +} + +func (r *Layer) FindParentByID(ctx context.Context, id id.LayerID) 
(*layer.Group, error) { + r.lock.Lock() + defer r.lock.Unlock() + + for _, l := range r.data { + if lg := layer.ToLayerGroup(l); lg != nil && r.f.CanRead(l.Scene()) { + for _, cl := range lg.Layers().Layers() { + if cl == id { + return lg, nil + } + } + } + } + return nil, rerror.ErrNotFound +} + +func (r *Layer) FindByScene(ctx context.Context, sceneID id.SceneID) (layer.List, error) { + if !r.f.CanRead(sceneID) { + return nil, nil + } + + r.lock.Lock() + defer r.lock.Unlock() + + res := layer.List{} + for _, l := range r.data { + l := l + if l.Scene() == sceneID { + res = append(res, &l) + } + } + return res, nil +} + +func (r *Layer) FindAllByDatasetSchema(ctx context.Context, datasetSchemaID id.DatasetSchemaID) (layer.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + + res := layer.List{} + for _, l := range r.data { + l := l + if d := layer.ToLayerGroup(l).LinkedDatasetSchema(); d != nil && *d == datasetSchemaID && r.f.CanRead(l.Scene()) { + res = append(res, &l) + } + } + return res, nil +} + +func (r *Layer) FindByTag(ctx context.Context, tagID id.TagID) (layer.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + + var res layer.List + for _, l := range r.data { + l := l + if l.Tags().Has(tagID) && r.f.CanRead(l.Scene()) { + res = append(res, &l) + } + } + + return res, nil +} + +func (r *Layer) Save(ctx context.Context, l layer.Layer) error { + if !r.f.CanWrite(l.Scene()) { + return repo.ErrOperationDenied + } + + r.lock.Lock() + defer r.lock.Unlock() + + r.data[l.ID()] = l + return nil +} + +func (r *Layer) SaveAll(ctx context.Context, ll layer.List) error { + r.lock.Lock() + defer r.lock.Unlock() + + for _, l := range ll { + layer := *l + if r.f.CanWrite(layer.Scene()) { + r.data[layer.ID()] = layer + } + } + return nil +} + +func (r *Layer) UpdatePlugin(ctx context.Context, old id.PluginID, new id.PluginID) error { + r.lock.Lock() + defer r.lock.Unlock() + + for _, l := range r.data { + p := l.Plugin() + if p != nil && p.Equal(old) && 
r.f.CanWrite(l.Scene()) { + l.SetPlugin(&new) + r.data[l.ID()] = l + } + } + return nil +} + +func (r *Layer) Remove(ctx context.Context, id id.LayerID) error { + r.lock.Lock() + defer r.lock.Unlock() + + if l, ok := r.data[id]; ok && l != nil && r.f.CanWrite(l.Scene()) { + delete(r.data, id) + } + return nil +} + +func (r *Layer) RemoveAll(ctx context.Context, ids id.LayerIDList) error { + r.lock.Lock() + defer r.lock.Unlock() + + for _, id := range ids { + if l, ok := r.data[id]; ok && l != nil && r.f.CanWrite(l.Scene()) { + delete(r.data, id) + } + } + return nil +} + +func (r *Layer) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + if !r.f.CanWrite(sceneID) { + return nil + } + + r.lock.Lock() + defer r.lock.Unlock() + + for lid, p := range r.data { + if p.Scene() == sceneID { + delete(r.data, lid) + } + } + return nil +} diff --git a/server/internal/infrastructure/memory/layer_test.go b/server/internal/infrastructure/memory/layer_test.go new file mode 100644 index 000000000..d3d2693c3 --- /dev/null +++ b/server/internal/infrastructure/memory/layer_test.go @@ -0,0 +1,29 @@ +package memory + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/tag" + "github.com/stretchr/testify/assert" +) + +func TestLayer_FindByTag(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := layer.NewTagList([]layer.Tag{layer.NewTagGroup(t1.ID(), nil)}) + lg := layer.New().NewID().Tags(tl).Scene(sid).Group().MustBuild() + + repo := Layer{ + data: map[id.LayerID]layer.Layer{ + lg.ID(): lg, + }, + } + + out, err := repo.FindByTag(ctx, t1.ID()) + assert.NoError(t, err) + assert.Equal(t, layer.List{lg.LayerRef()}, out) +} diff --git a/server/internal/infrastructure/memory/lock.go b/server/internal/infrastructure/memory/lock.go new file mode 100644 index 
000000000..632586c3a --- /dev/null +++ b/server/internal/infrastructure/memory/lock.go @@ -0,0 +1,21 @@ +package memory + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase/repo" +) + +type Lock struct{} + +func NewLock() repo.Lock { + return &Lock{} +} + +func (r *Lock) Lock(_ context.Context, _ string) error { + return nil +} + +func (r *Lock) Unlock(_ context.Context, _ string) error { + return nil +} diff --git a/server/internal/infrastructure/memory/plugin.go b/server/internal/infrastructure/memory/plugin.go new file mode 100644 index 000000000..4e3db19df --- /dev/null +++ b/server/internal/infrastructure/memory/plugin.go @@ -0,0 +1,118 @@ +package memory + +import ( + "context" + "errors" + "sync" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type Plugin struct { + lock sync.Mutex + data []*plugin.Plugin + f repo.SceneFilter +} + +func NewPlugin() repo.Plugin { + return &Plugin{ + data: []*plugin.Plugin{}, + } +} + +func NewPluginWith(items ...*plugin.Plugin) repo.Plugin { + r := NewPlugin() + for _, i := range items { + _ = r.Save(nil, i) + } + return r +} + +func (r *Plugin) Filtered(f repo.SceneFilter) repo.Plugin { + return &Plugin{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: r.f.Merge(f), + } +} + +func (r *Plugin) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if p := builtin.GetPlugin(id); p != nil { + return p, nil + } + for _, p := range r.data { + if p.ID().Equal(id) { + if s := p.ID().Scene(); s == nil || r.f.CanRead(*s) { + return p.Clone(), nil + } + } + } + return nil, rerror.ErrNotFound +} + +func (r *Plugin) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, 
error) {
	r.lock.Lock()
	defer r.lock.Unlock()

	// Builtin plugins take precedence over stored ones. Stored matches are
	// cloned so callers cannot mutate repository state; an unreadable match
	// contributes a nil entry.
	result := []*plugin.Plugin{}
	for _, id := range ids {
		if p := builtin.GetPlugin(id); p != nil {
			result = append(result, p)
			continue
		}
		for _, p := range r.data {
			if p.ID().Equal(id) {
				if s := p.ID().Scene(); s == nil || r.f.CanRead(*s) {
					result = append(result, p.Clone())
				} else {
					result = append(result, nil)
				}
			}
		}
	}
	return result, nil
}

// Save stores a clone of p, replacing any existing plugin with the same ID.
// System plugins are immutable and scene-scoped plugins require write access.
func (r *Plugin) Save(ctx context.Context, p *plugin.Plugin) error {
	r.lock.Lock()
	defer r.lock.Unlock()

	if p.ID().System() {
		// BUG FIX: corrected typo "cannnot" -> "cannot" in the error message.
		return errors.New("cannot save system plugin")
	}
	if s := p.ID().Scene(); s != nil && !r.f.CanWrite(*s) {
		return repo.ErrOperationDenied
	}

	// Drop any previous version with the same ID before appending the clone.
	for i, q := range r.data {
		if q.ID().Equal(p.ID()) {
			r.data = append(r.data[:i], r.data[i+1:]...)
			break
		}
	}
	r.data = append(r.data, p.Clone())
	return nil
}

// Remove deletes every writable plugin whose ID equals id.
func (r *Plugin) Remove(ctx context.Context, id id.PluginID) error {
	r.lock.Lock()
	defer r.lock.Unlock()

	for i := 0; i < len(r.data); i++ {
		if p := r.data[i]; p.ID().Equal(id) {
			if s := p.ID().Scene(); s == nil || r.f.CanWrite(*s) {
				r.data = append(r.data[:i], r.data[i+1:]...)
+ i-- + } + } + } + + return nil +} diff --git a/server/internal/infrastructure/memory/project.go b/server/internal/infrastructure/memory/project.go new file mode 100644 index 000000000..4c1563e44 --- /dev/null +++ b/server/internal/infrastructure/memory/project.go @@ -0,0 +1,136 @@ +package memory + +import ( + "context" + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/project" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type Project struct { + lock sync.Mutex + data map[id.ProjectID]*project.Project + f repo.TeamFilter +} + +func NewProject() repo.Project { + return &Project{ + data: map[id.ProjectID]*project.Project{}, + } +} + +func (r *Project) Filtered(f repo.TeamFilter) repo.Project { + return &Project{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: r.f.Merge(f), + } +} + +func (r *Project) FindByTeam(ctx context.Context, id id.TeamID, p *usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := []*project.Project{} + for _, d := range r.data { + if d.Team() == id { + result = append(result, d) + } + } + + var startCursor, endCursor *usecase.Cursor + if len(result) > 0 { + _startCursor := usecase.Cursor(result[0].ID().String()) + _endCursor := usecase.Cursor(result[len(result)-1].ID().String()) + startCursor = &_startCursor + endCursor = &_endCursor + } + + return result, usecase.NewPageInfo( + len(r.data), + startCursor, + endCursor, + true, + true, + ), nil +} + +func (r *Project) FindByIDs(ctx context.Context, ids id.ProjectIDList) ([]*project.Project, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := []*project.Project{} + for _, id := range ids { + if d, ok := r.data[id]; ok && r.f.CanRead(d.Team()) { + result = append(result, d) + continue + 
} + result = append(result, nil) + } + return result, nil +} + +func (r *Project) FindByID(ctx context.Context, id id.ProjectID) (*project.Project, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if p, ok := r.data[id]; ok && r.f.CanRead(p.Team()) { + return p, nil + } + return nil, rerror.ErrNotFound +} + +func (r *Project) FindByPublicName(ctx context.Context, name string) (*project.Project, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if name == "" { + return nil, nil + } + for _, p := range r.data { + if p.MatchWithPublicName(name) && r.f.CanRead(p.Team()) { + return p, nil + } + } + return nil, rerror.ErrNotFound +} + +func (r *Project) CountByTeam(ctx context.Context, team id.TeamID) (c int, err error) { + r.lock.Lock() + defer r.lock.Unlock() + + for _, p := range r.data { + if p.Team() == team && r.f.CanRead(p.Team()) { + c++ + } + } + return +} + +func (r *Project) Save(ctx context.Context, p *project.Project) error { + if !r.f.CanWrite(p.Team()) { + return repo.ErrOperationDenied + } + + r.lock.Lock() + defer r.lock.Unlock() + + p.SetUpdatedAt(time.Now()) + r.data[p.ID()] = p + return nil +} + +func (r *Project) Remove(ctx context.Context, id id.ProjectID) error { + r.lock.Lock() + defer r.lock.Unlock() + + if p, ok := r.data[id]; ok && r.f.CanRead(p.Team()) { + delete(r.data, id) + } + return nil +} diff --git a/server/internal/infrastructure/memory/property.go b/server/internal/infrastructure/memory/property.go new file mode 100644 index 000000000..2e63513d4 --- /dev/null +++ b/server/internal/infrastructure/memory/property.go @@ -0,0 +1,216 @@ +package memory + +import ( + "context" + "sync" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" + + "github.com/reearth/reearth-backend/internal/usecase/repo" +) + +type Property struct { + lock sync.Mutex + data property.Map + f repo.SceneFilter +} + +func NewProperty() repo.Property { + return 
&Property{ + data: property.Map{}, + } +} + +func NewPropertyWith(items ...*property.Property) repo.Property { + r := NewProperty() + for _, i := range items { + _ = r.Save(nil, i) + } + return r +} + +func (r *Property) Filtered(f repo.SceneFilter) repo.Property { + return &Property{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: r.f.Merge(f), + } +} + +func (r *Property) FindByID(ctx context.Context, id id.PropertyID) (*property.Property, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if p, ok := r.data[id]; ok && r.f.CanRead(p.Scene()) { + return p, nil + } + return nil, rerror.ErrNotFound +} + +func (r *Property) FindByIDs(ctx context.Context, ids id.PropertyIDList) (property.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := property.List{} + for _, id := range ids { + if d, ok := r.data[id]; ok && r.f.CanRead(d.Scene()) { + result = append(result, d) + continue + } + result = append(result, nil) + } + return result, nil +} + +func (r *Property) FindByDataset(ctx context.Context, sid id.DatasetSchemaID, did id.DatasetID) (property.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := property.List{} + for _, p := range r.data { + if p.IsDatasetLinked(sid, did) && r.f.CanRead(p.Scene()) { + result = append(result, p) + } + } + return result, nil +} + +func (r *Property) FindLinkedAll(ctx context.Context, s id.SceneID) (property.List, error) { + if !r.f.CanRead(s) { + return nil, nil + } + + r.lock.Lock() + defer r.lock.Unlock() + + result := property.List{} + for _, p := range r.data { + if p.Scene() == s && p.HasLinkedField() { + result = append(result, p) + } + } + return result, nil +} + +func (r *Property) FindBySchema(_ context.Context, schemas []id.PropertySchemaID, scene id.SceneID) (property.List, error) { + if !r.f.CanRead(scene) { + return nil, nil + } + + r.lock.Lock() + defer r.lock.Unlock() + + result := property.List{} + for _, p := range r.data { + if 
p.Scene() != scene { + continue + } + for _, s := range schemas { + if p.Schema().Equal(s) { + result = append(result, p) + break + } + } + } + result.Sort() + return result, nil +} + +func (r *Property) FindByPlugin(_ context.Context, plugin id.PluginID, scene id.SceneID) (property.List, error) { + if !r.f.CanRead(scene) { + return nil, nil + } + + r.lock.Lock() + defer r.lock.Unlock() + + result := property.List{} + for _, p := range r.data { + if p.Scene() == scene && p.Schema().Plugin().Equal(plugin) { + result = append(result, p) + break + } + } + result.Sort() + return result, nil +} + +func (r *Property) Save(ctx context.Context, p *property.Property) error { + if !r.f.CanWrite(p.Scene()) { + return repo.ErrOperationDenied + } + + r.lock.Lock() + defer r.lock.Unlock() + + r.data[p.ID()] = p + return nil +} + +func (r *Property) SaveAll(ctx context.Context, pl property.List) error { + r.lock.Lock() + defer r.lock.Unlock() + + for _, p := range pl { + if r.f.CanWrite(p.Scene()) { + r.data[p.ID()] = p + } + } + return nil +} + +func (r *Property) UpdateSchemaPlugin(ctx context.Context, old id.PluginID, new id.PluginID, scene id.SceneID) error { + if !r.f.CanWrite(scene) { + return nil + } + + r.lock.Lock() + defer r.lock.Unlock() + + for _, p := range r.data { + if s := p.Schema(); s.Plugin().Equal(old) && p.Scene() == scene { + p.SetSchema(id.NewPropertySchemaID(new, s.ID())) + } + } + return nil +} + +func (r *Property) Remove(ctx context.Context, id id.PropertyID) error { + r.lock.Lock() + defer r.lock.Unlock() + + if p, ok := r.data[id]; ok && r.f.CanWrite(p.Scene()) { + delete(r.data, id) + } + return nil +} + +func (r *Property) RemoveAll(ctx context.Context, ids id.PropertyIDList) error { + r.lock.Lock() + defer r.lock.Unlock() + + for _, id := range ids { + if p, ok := r.data[id]; ok && r.f.CanWrite(p.Scene()) { + delete(r.data, id) + } + } + return nil +} + +func (r *Property) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + if 
!r.f.CanWrite(sceneID) { + return nil + } + + r.lock.Lock() + defer r.lock.Unlock() + + for pid, p := range r.data { + if p.Scene() == sceneID { + delete(r.data, pid) + } + } + return nil +} diff --git a/server/internal/infrastructure/memory/property_schema.go b/server/internal/infrastructure/memory/property_schema.go new file mode 100644 index 000000000..f0e22b183 --- /dev/null +++ b/server/internal/infrastructure/memory/property_schema.go @@ -0,0 +1,147 @@ +package memory + +import ( + "context" + "errors" + "sync" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type PropertySchema struct { + lock sync.Mutex + data map[string]*property.Schema + f repo.SceneFilter +} + +func NewPropertySchema() repo.PropertySchema { + return &PropertySchema{} +} + +func NewPropertySchemaWith(items ...*property.Schema) repo.PropertySchema { + r := NewPropertySchema() + for _, i := range items { + _ = r.Save(nil, i) + } + return r +} + +func (r *PropertySchema) initMap() { + if r.data == nil { + r.data = map[string]*property.Schema{} + } +} + +func (r *PropertySchema) Filtered(f repo.SceneFilter) repo.PropertySchema { + return &PropertySchema{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: r.f.Merge(f), + } +} + +func (r *PropertySchema) FindByID(ctx context.Context, id id.PropertySchemaID) (*property.Schema, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if ps := builtin.GetPropertySchema(id); ps != nil { + return ps, nil + } + + r.initMap() + p, ok := r.data[id.String()] + if ok { + if s := p.Scene(); s == nil || r.f.CanRead(*s) { + return p, nil + } + } + return nil, rerror.ErrNotFound +} + +func (r *PropertySchema) FindByIDs(ctx context.Context, ids []id.PropertySchemaID) (property.SchemaList, error) { + 
r.lock.Lock() + defer r.lock.Unlock() + + r.initMap() + result := property.SchemaList{} + for _, id := range ids { + if ps := builtin.GetPropertySchema(id); ps != nil { + result = append(result, ps) + continue + } + if d, ok := r.data[id.String()]; ok { + if s := d.Scene(); s == nil || r.f.CanRead(*s) { + result = append(result, d) + } + } else { + result = append(result, nil) + } + } + return result, nil +} + +func (r *PropertySchema) Save(ctx context.Context, p *property.Schema) error { + if s := p.Scene(); s != nil && !r.f.CanWrite(*s) { + return repo.ErrOperationDenied + } + + r.lock.Lock() + defer r.lock.Unlock() + + r.initMap() + if p.ID().Plugin().System() { + return errors.New("cannnot save system property schema") + } + r.data[p.ID().String()] = p + return nil +} + +func (r *PropertySchema) SaveAll(ctx context.Context, p property.SchemaList) error { + r.lock.Lock() + defer r.lock.Unlock() + + r.initMap() + for _, p := range p { + if p.ID().Plugin().System() { + continue + } + if s := p.Scene(); s == nil || r.f.CanRead(*s) { + r.data[p.ID().String()] = p + } + } + return nil +} + +func (r *PropertySchema) Remove(ctx context.Context, id id.PropertySchemaID) error { + r.lock.Lock() + defer r.lock.Unlock() + + r.initMap() + + if d, ok := r.data[id.String()]; ok { + if s := d.Scene(); s == nil || r.f.CanRead(*s) { + delete(r.data, id.String()) + } + } + + return nil +} + +func (r *PropertySchema) RemoveAll(ctx context.Context, ids []id.PropertySchemaID) error { + r.lock.Lock() + defer r.lock.Unlock() + + r.initMap() + for _, id := range ids { + if d, ok := r.data[id.String()]; ok { + if s := d.Scene(); s == nil || r.f.CanRead(*s) { + delete(r.data, id.String()) + } + } + } + return nil +} diff --git a/server/internal/infrastructure/memory/scene.go b/server/internal/infrastructure/memory/scene.go new file mode 100644 index 000000000..4f29b502d --- /dev/null +++ b/server/internal/infrastructure/memory/scene.go @@ -0,0 +1,116 @@ +package memory + +import ( + 
"context" + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/user" +) + +type Scene struct { + lock sync.Mutex + data map[id.SceneID]*scene.Scene + f repo.TeamFilter +} + +func NewScene() repo.Scene { + return &Scene{ + data: map[id.SceneID]*scene.Scene{}, + } +} + +func NewSceneWith(items ...*scene.Scene) repo.Scene { + r := NewScene() + for _, i := range items { + _ = r.Save(nil, i) + } + return r +} + +func (r *Scene) Filtered(f repo.TeamFilter) repo.Scene { + return &Scene{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: r.f.Merge(f), + } +} + +func (r *Scene) FindByID(ctx context.Context, id id.SceneID) (*scene.Scene, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if s, ok := r.data[id]; ok && r.f.CanRead(s.Team()) { + return s, nil + } + return nil, rerror.ErrNotFound +} + +func (r *Scene) FindByIDs(ctx context.Context, ids id.SceneIDList) (scene.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := scene.List{} + for _, id := range ids { + if d, ok := r.data[id]; ok && r.f.CanRead(d.Team()) { + result = append(result, d) + continue + } + result = append(result, nil) + + } + return result, nil +} + +func (r *Scene) FindByProject(ctx context.Context, id id.ProjectID) (*scene.Scene, error) { + r.lock.Lock() + defer r.lock.Unlock() + + for _, d := range r.data { + if d.Project() == id && r.f.CanRead(d.Team()) { + return d, nil + } + } + return nil, rerror.ErrNotFound +} + +func (r *Scene) FindByTeam(ctx context.Context, teams ...id.TeamID) (scene.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := scene.List{} + for _, d := range r.data { + if user.TeamIDList(teams).Has(d.Team()) && r.f.CanRead(d.Team()) { + result = append(result, d) + } + } + return result, nil 
+} + +func (r *Scene) Save(ctx context.Context, s *scene.Scene) error { + if !r.f.CanWrite(s.Team()) { + return repo.ErrOperationDenied + } + + r.lock.Lock() + defer r.lock.Unlock() + + s.SetUpdatedAt(time.Now()) + r.data[s.ID()] = s + return nil +} + +func (r *Scene) Remove(ctx context.Context, id id.SceneID) error { + r.lock.Lock() + defer r.lock.Unlock() + + if s, ok := r.data[id]; ok && r.f.CanWrite(s.Team()) { + delete(r.data, id) + } + + return nil +} diff --git a/server/internal/infrastructure/memory/scene_lock.go b/server/internal/infrastructure/memory/scene_lock.go new file mode 100644 index 000000000..3b65a725a --- /dev/null +++ b/server/internal/infrastructure/memory/scene_lock.go @@ -0,0 +1,66 @@ +package memory + +import ( + "context" + "sync" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type sceneLock struct { + lock sync.Map +} + +func NewSceneLock() repo.SceneLock { + return &sceneLock{} +} + +func (r *sceneLock) GetLock(ctx context.Context, sceneID id.SceneID) (scene.LockMode, error) { + if sceneID.IsNil() { + return "", id.ErrInvalidID + } + if v, ok := r.lock.Load(sceneID); ok { + if v2, ok2 := v.(scene.LockMode); ok2 { + return v2, nil + } + } + return scene.LockModeFree, nil +} + +func (r *sceneLock) GetAllLock(ctx context.Context, sceneID id.SceneIDList) ([]scene.LockMode, error) { + res := make([]scene.LockMode, 0, len(sceneID)) + for _, si := range sceneID { + if si.IsNil() { + return nil, id.ErrInvalidID + } + if v, ok := r.lock.Load(si); ok { + if v2, ok2 := v.(scene.LockMode); ok2 { + res = append(res, v2) + } else { + res = append(res, scene.LockModeFree) + } + } else { + res = append(res, scene.LockModeFree) + } + } + return res, nil +} + +func (r *sceneLock) SaveLock(ctx context.Context, sceneID id.SceneID, lock scene.LockMode) error { + if lock == scene.LockModeFree { + r.lock.Delete(sceneID) + } else { + 
r.lock.Store(sceneID, lock) + } + return nil +} + +func (r *sceneLock) ReleaseAllLock(ctx context.Context) error { + r.lock.Range(func(key interface{}, value interface{}) bool { + r.lock.Delete(key) + return true + }) + return nil +} diff --git a/server/internal/infrastructure/memory/tag.go b/server/internal/infrastructure/memory/tag.go new file mode 100644 index 000000000..300fcce87 --- /dev/null +++ b/server/internal/infrastructure/memory/tag.go @@ -0,0 +1,213 @@ +package memory + +import ( + "context" + "sync" + + "github.com/reearth/reearth-backend/pkg/rerror" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/tag" +) + +type Tag struct { + lock sync.Mutex + data tag.Map + f repo.SceneFilter +} + +func NewTag() repo.Tag { + return &Tag{ + data: map[id.TagID]tag.Tag{}, + } +} + +func (r *Tag) Filtered(f repo.SceneFilter) repo.Tag { + return &Tag{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: r.f.Merge(f), + } +} + +func (r *Tag) FindByID(ctx context.Context, tagID id.TagID) (tag.Tag, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if res, ok := r.data[tagID]; ok && r.f.CanRead(res.Scene()) { + return res, nil + } + return nil, rerror.ErrNotFound +} + +func (r *Tag) FindByIDs(ctx context.Context, tids id.TagIDList) ([]*tag.Tag, error) { + r.lock.Lock() + defer r.lock.Unlock() + + var res []*tag.Tag + for _, id := range tids { + if d, ok := r.data[id]; ok && r.f.CanRead(d.Scene()) { + res = append(res, &d) + continue + } + res = append(res, nil) + } + return res, nil +} + +func (r *Tag) FindByScene(ctx context.Context, sceneID id.SceneID) ([]*tag.Tag, error) { + if !r.f.CanRead(sceneID) { + return nil, nil + } + + r.lock.Lock() + defer r.lock.Unlock() + + return r.data.All().FilterByScene(sceneID).Refs(), nil +} + +func (r *Tag) FindItemByID(ctx context.Context, tagID id.TagID) (*tag.Item, error) { + 
r.lock.Lock() + defer r.lock.Unlock() + + if d, ok := r.data[tagID]; ok { + if res := tag.ItemFrom(d); res != nil && r.f.CanRead(res.Scene()) { + return res, nil + } + } + return nil, rerror.ErrNotFound +} + +func (r *Tag) FindItemByIDs(ctx context.Context, tagIDs id.TagIDList) ([]*tag.Item, error) { + r.lock.Lock() + defer r.lock.Unlock() + + var res []*tag.Item + for _, id := range tagIDs { + if d, ok := r.data[id]; ok { + if ti := tag.ItemFrom(d); ti != nil && r.f.CanRead(ti.Scene()) { + res = append(res, ti) + } + } + } + return res, nil +} + +func (r *Tag) FindGroupByID(ctx context.Context, tagID id.TagID) (*tag.Group, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if d, ok := r.data[tagID]; ok { + if tg := tag.GroupFrom(d); tg != nil && r.f.CanRead(tg.Scene()) { + return tg, nil + } + } + return nil, rerror.ErrNotFound +} + +func (r *Tag) FindGroupByIDs(ctx context.Context, tagIDs id.TagIDList) ([]*tag.Group, error) { + r.lock.Lock() + defer r.lock.Unlock() + + var res []*tag.Group + for _, id := range tagIDs { + if d, ok := r.data[id]; ok { + if tg := tag.GroupFrom(d); tg != nil && r.f.CanRead(tg.Scene()) { + res = append(res, tg) + } + } + } + return res, nil +} + +func (r *Tag) FindRootsByScene(ctx context.Context, sceneID id.SceneID) ([]*tag.Tag, error) { + if !r.f.CanRead(sceneID) { + return nil, nil + } + + r.lock.Lock() + defer r.lock.Unlock() + + return r.data.All().FilterByScene(sceneID).Roots().Refs(), nil +} + +func (r *Tag) FindGroupByItem(ctx context.Context, tagID id.TagID) (*tag.Group, error) { + r.lock.Lock() + defer r.lock.Unlock() + + for _, tg := range r.data { + if res := tag.GroupFrom(tg); res != nil { + for _, item := range res.Tags() { + if item == tagID { + return res, nil + } + } + } + } + + return nil, rerror.ErrNotFound +} + +func (r *Tag) Save(ctx context.Context, tag tag.Tag) error { + if !r.f.CanWrite(tag.Scene()) { + return repo.ErrOperationDenied + } + + r.lock.Lock() + defer r.lock.Unlock() + + r.data[tag.ID()] = tag + 
return nil +} + +func (r *Tag) SaveAll(ctx context.Context, tags []*tag.Tag) error { + r.lock.Lock() + defer r.lock.Unlock() + + for _, tagRef := range tags { + tag := *tagRef + if r.f.CanWrite(tag.Scene()) { + r.data[tag.ID()] = tag + } + } + return nil +} + +func (r *Tag) Remove(ctx context.Context, id id.TagID) error { + r.lock.Lock() + defer r.lock.Unlock() + + if t, ok := r.data[id]; ok && r.f.CanWrite(t.Scene()) { + delete(r.data, id) + } + return nil +} + +func (r *Tag) RemoveAll(ctx context.Context, ids id.TagIDList) error { + r.lock.Lock() + defer r.lock.Unlock() + + for _, id := range ids { + if t, ok := r.data[id]; ok && r.f.CanWrite(t.Scene()) { + delete(r.data, id) + } + } + return nil +} + +func (r *Tag) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + if !r.f.CanWrite(sceneID) { + return nil + } + + r.lock.Lock() + defer r.lock.Unlock() + + for tid, v := range r.data { + if v.Scene() == sceneID { + delete(r.data, tid) + } + } + return nil +} diff --git a/server/internal/infrastructure/memory/tag_test.go b/server/internal/infrastructure/memory/tag_test.go new file mode 100644 index 000000000..82130cf20 --- /dev/null +++ b/server/internal/infrastructure/memory/tag_test.go @@ -0,0 +1,286 @@ +package memory + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/tag" + "github.com/stretchr/testify/assert" +) + +func TestNewTag(t *testing.T) { + repo := NewTag() + assert.NotNil(t, repo) + +} + +func TestTag_FindByID(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tti := tag.Tag(t1) + repo := Tag{ + data: map[id.TagID]tag.Tag{t1.ID(): tti}, + } + out, err := repo.FindByID(ctx, t1.ID()) + assert.NoError(t, err) + assert.Equal(t, tti, out) + + _, err = repo.FindByID(ctx, 
id.TagID{}) + assert.Same(t, rerror.ErrNotFound, err) +} + +func TestTag_FindByIDs(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + sid2 := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := id.TagIDList{t1.ID()} + t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() + t3, _ := tag.NewItem().NewID().Scene(sid2).Label("item2").Build() + tti := tag.Tag(t1) + tti2 := tag.Tag(t3) + ttg := tag.Tag(t2) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): ttg, + t3.ID(): tti2, + }, + } + out, err := repo.FindByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}) + assert.NoError(t, err) + assert.Equal(t, []*tag.Tag{&tti, &ttg}, out) +} + +func TestTag_FindRootsByScene(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + sid2 := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := id.TagIDList{t1.ID()} + t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() + t3, _ := tag.NewItem().NewID().Scene(sid2).Label("item2").Build() + tti := tag.Tag(t1) + tti2 := tag.Tag(t3) + ttg := tag.Tag(t2) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): ttg, + t3.ID(): tti2, + }, + } + out, err := repo.FindRootsByScene(ctx, sid2) + assert.NoError(t, err) + assert.Equal(t, []*tag.Tag{&tti2}, out) +} + +func TestTag_FindGroupByID(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := id.TagIDList{t1.ID()} + t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() + tti := tag.Tag(t1) + ttg := tag.Tag(t2) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): ttg, + }, + } + out, err := repo.FindGroupByID(ctx, t2.ID()) + assert.NoError(t, err) + assert.Equal(t, t2, out) + + _, err = repo.FindGroupByID(ctx, id.TagID{}) + assert.Same(t, rerror.ErrNotFound, err) +} + +func 
TestTag_FindItemByID(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := id.TagIDList{t1.ID()} + t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() + tti := tag.Tag(t1) + ttg := tag.Tag(t2) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): ttg, + }, + } + out, err := repo.FindItemByID(ctx, t1.ID()) + assert.NoError(t, err) + assert.Equal(t, t1, out) + + _, err = repo.FindItemByID(ctx, id.TagID{}) + assert.Same(t, rerror.ErrNotFound, err) +} + +func TestTag_FindGroupByIDs(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + t1, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Build() + t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group2").Build() + ttg := tag.Tag(t1) + ttg2 := tag.Tag(t2) + r := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): ttg, + t2.ID(): ttg2, + }, + } + out, err := r.FindGroupByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}) + assert.NoError(t, err) + assert.Equal(t, []*tag.Group{t1, t2}, out) + + out, err = r.Filtered(repo.SceneFilter{ + Readable: []id.SceneID{}, + }).FindGroupByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}) + assert.NoError(t, err) + assert.Equal(t, 0, len(out)) +} + +func TestTag_FindItemByIDs(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + t2, _ := tag.NewItem().NewID().Scene(sid).Label("item2").Build() + tti := tag.Tag(t1) + tti2 := tag.Tag(t2) + r := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): tti2, + }, + } + out, err := r.FindItemByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}) + assert.NoError(t, err) + assert.Equal(t, []*tag.Item{t1, t2}, out) + + out, err = r.Filtered(repo.SceneFilter{ + Readable: []id.SceneID{}, + }).FindItemByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}) + assert.NoError(t, err) + assert.Equal(t, 0, len(out)) +} + +func TestTag_Save(t *testing.T) { + ctx := 
context.Background() + repo := NewTag() + sid := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tti := tag.Tag(t1) + + err := repo.Save(ctx, tti) + assert.NoError(t, err) + out, _ := repo.FindByID(ctx, t1.ID()) + assert.Equal(t, tti, out) +} + +func TestTag_SaveAll(t *testing.T) { + ctx := context.Background() + repo := NewTag() + sid := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + t2, _ := tag.NewItem().NewID().Scene(sid).Label("item2").Build() + tti := tag.Tag(t1) + tti2 := tag.Tag(t2) + + err := repo.SaveAll(ctx, []*tag.Tag{&tti, &tti2}) + assert.NoError(t, err) + out, _ := repo.FindByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}) + assert.Equal(t, []*tag.Tag{&tti, &tti2}, out) +} + +func TestTag_Remove(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := id.TagIDList{t1.ID()} + t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() + tti := tag.Tag(t1) + ttg := tag.Tag(t2) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): ttg, + }, + } + err := repo.Remove(ctx, t1.ID()) + assert.NoError(t, err) + out, _ := repo.FindRootsByScene(ctx, sid) + assert.Equal(t, []*tag.Tag{&ttg}, out) +} + +func TestTag_RemoveAll(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := id.TagIDList{t1.ID()} + t2, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + t3, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() + tti := tag.Tag(t1) + tti2 := tag.Tag(t2) + ttg := tag.Tag(t3) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): tti2, + t3.ID(): ttg, + }, + } + err := repo.RemoveAll(ctx, []id.TagID{t1.ID(), t3.ID()}) + assert.NoError(t, err) + out, _ := repo.FindRootsByScene(ctx, sid) + assert.Equal(t, []*tag.Tag{&tti2}, out) +} + +func 
TestTag_RemoveByScene(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + sid2 := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := id.TagIDList{t1.ID()} + t2, _ := tag.NewItem().NewID().Scene(sid2).Label("item").Build() + t3, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() + tti := tag.Tag(t1) + tti2 := tag.Tag(t2) + ttg := tag.Tag(t3) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): tti2, + t3.ID(): ttg, + }, + } + err := repo.RemoveByScene(ctx, sid) + assert.NoError(t, err) + out, _ := repo.FindRootsByScene(ctx, sid2) + assert.Equal(t, []*tag.Tag{&tti2}, out) +} + +func TestTag_FindGroupByItem(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := id.TagIDList{t1.ID()} + t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() + tti := tag.Tag(t1) + ttg := tag.Tag(t2) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): ttg, + }, + } + out, err := repo.FindGroupByItem(ctx, t1.ID()) + assert.NoError(t, err) + assert.Equal(t, t2, out) +} diff --git a/server/internal/infrastructure/memory/team.go b/server/internal/infrastructure/memory/team.go new file mode 100644 index 000000000..625baeded --- /dev/null +++ b/server/internal/infrastructure/memory/team.go @@ -0,0 +1,97 @@ +package memory + +import ( + "context" + "sync" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/user" +) + +type Team struct { + lock sync.Mutex + data map[id.TeamID]*user.Team +} + +func NewTeam() repo.Team { + return &Team{ + data: map[id.TeamID]*user.Team{}, + } +} + +func (r *Team) FindByUser(ctx context.Context, i id.UserID) (user.TeamList, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := 
user.TeamList{} + for _, d := range r.data { + if d.Members().ContainsUser(i) { + result = append(result, d) + } + } + return result, nil +} + +func (r *Team) FindByIDs(ctx context.Context, ids id.TeamIDList) (user.TeamList, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := user.TeamList{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + result = append(result, d) + } else { + result = append(result, nil) + } + } + return result, nil +} + +func (r *Team) FindByID(ctx context.Context, id id.TeamID) (*user.Team, error) { + r.lock.Lock() + defer r.lock.Unlock() + + d, ok := r.data[id] + if ok { + return d, nil + } + return nil, rerror.ErrNotFound +} + +func (r *Team) Save(ctx context.Context, t *user.Team) error { + r.lock.Lock() + defer r.lock.Unlock() + + r.data[t.ID()] = t + return nil +} + +func (r *Team) SaveAll(ctx context.Context, teams []*user.Team) error { + r.lock.Lock() + defer r.lock.Unlock() + + for _, t := range teams { + r.data[t.ID()] = t + } + return nil +} + +func (r *Team) Remove(ctx context.Context, id id.TeamID) error { + r.lock.Lock() + defer r.lock.Unlock() + + delete(r.data, id) + return nil +} + +func (r *Team) RemoveAll(ctx context.Context, ids id.TeamIDList) error { + r.lock.Lock() + defer r.lock.Unlock() + + for _, id := range ids { + delete(r.data, id) + } + return nil +} diff --git a/server/internal/infrastructure/memory/transaction.go b/server/internal/infrastructure/memory/transaction.go new file mode 100644 index 000000000..6c57908db --- /dev/null +++ b/server/internal/infrastructure/memory/transaction.go @@ -0,0 +1,60 @@ +package memory + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase/repo" +) + +type Transaction struct { + committed int + beginerror error + enderror error +} + +type Tx struct { + t *Transaction + committed bool + enderror error +} + +func NewTransaction() *Transaction { + return &Transaction{} +} + +func (t *Transaction) SetBeginError(err error) { + t.beginerror 
= err +} + +func (t *Transaction) SetEndError(err error) { + t.enderror = err +} + +func (t *Transaction) Committed() int { + return t.committed +} + +func (t *Transaction) Begin() (repo.Tx, error) { + if t.beginerror != nil { + return nil, t.beginerror + } + return &Tx{t: t, enderror: t.enderror}, nil +} + +func (t *Tx) Commit() { + t.committed = true +} + +func (t *Tx) End(_ context.Context) error { + if t.enderror != nil { + return t.enderror + } + if t.t != nil && t.committed { + t.t.committed++ + } + return nil +} + +func (t *Tx) IsCommitted() bool { + return t.committed +} diff --git a/server/internal/infrastructure/memory/transaction_test.go b/server/internal/infrastructure/memory/transaction_test.go new file mode 100644 index 000000000..47a3bebce --- /dev/null +++ b/server/internal/infrastructure/memory/transaction_test.go @@ -0,0 +1,38 @@ +package memory + +import ( + "context" + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTransaction_Committed(t *testing.T) { + tr := NewTransaction() + tx, err := tr.Begin() + assert.NoError(t, err) + assert.Equal(t, 0, tr.Committed()) + tx.Commit() + assert.Equal(t, 0, tr.Committed()) + assert.NoError(t, tx.End(context.Background())) + assert.Equal(t, 1, tr.Committed()) +} + +func TestTransaction_SetBeginError(t *testing.T) { + err := errors.New("a") + tr := NewTransaction() + tr.SetBeginError(err) + tx, err2 := tr.Begin() + assert.Nil(t, tx) + assert.Same(t, err, err2) +} + +func TestTransaction_SetEndError(t *testing.T) { + err := errors.New("a") + tr := NewTransaction() + tr.SetEndError(err) + tx, err2 := tr.Begin() + assert.NoError(t, err2) + assert.Same(t, err, tx.End(context.Background())) +} diff --git a/server/internal/infrastructure/memory/user.go b/server/internal/infrastructure/memory/user.go new file mode 100644 index 000000000..16512cbbf --- /dev/null +++ b/server/internal/infrastructure/memory/user.go @@ -0,0 +1,167 @@ +package memory + +import ( + "context" + "sync" + + 
"github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/user" +) + +type User struct { + lock sync.Mutex + data map[id.UserID]*user.User +} + +func NewUser() repo.User { + return &User{ + data: map[id.UserID]*user.User{}, + } +} + +func (r *User) FindByIDs(ctx context.Context, ids id.UserIDList) ([]*user.User, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := []*user.User{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + result = append(result, d) + } else { + result = append(result, nil) + } + } + return result, nil +} + +func (r *User) FindByID(ctx context.Context, id id.UserID) (*user.User, error) { + r.lock.Lock() + defer r.lock.Unlock() + + d, ok := r.data[id] + if ok { + return d, nil + } + return nil, rerror.ErrNotFound +} + +func (r *User) Save(ctx context.Context, u *user.User) error { + r.lock.Lock() + defer r.lock.Unlock() + + r.data[u.ID()] = u + return nil +} + +func (r *User) FindByAuth0Sub(ctx context.Context, auth0sub string) (*user.User, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if auth0sub == "" { + return nil, rerror.ErrInvalidParams + } + + for _, u := range r.data { + if u.ContainAuth(user.AuthFromAuth0Sub(auth0sub)) { + return u, nil + } + } + + return nil, rerror.ErrNotFound +} + +func (r *User) FindByPasswordResetRequest(ctx context.Context, token string) (*user.User, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if token == "" { + return nil, rerror.ErrInvalidParams + } + + for _, u := range r.data { + pwdReq := u.PasswordReset() + if pwdReq != nil && pwdReq.Token == token { + return u, nil + } + } + + return nil, rerror.ErrNotFound +} + +func (r *User) FindByEmail(ctx context.Context, email string) (*user.User, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if email == "" { + return nil, rerror.ErrInvalidParams + } + + for _, u := range r.data { + if 
u.Email() == email { + return u, nil + } + } + + return nil, rerror.ErrNotFound +} + +func (r *User) FindByName(ctx context.Context, name string) (*user.User, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if name == "" { + return nil, rerror.ErrInvalidParams + } + + for _, u := range r.data { + if u.Name() == name { + return u, nil + } + } + + return nil, rerror.ErrNotFound +} + +func (r *User) FindByNameOrEmail(ctx context.Context, nameOrEmail string) (*user.User, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if nameOrEmail == "" { + return nil, rerror.ErrInvalidParams + } + + for _, u := range r.data { + if u.Email() == nameOrEmail || u.Name() == nameOrEmail { + return u, nil + } + } + + return nil, rerror.ErrNotFound +} + +func (r *User) Remove(ctx context.Context, user id.UserID) error { + r.lock.Lock() + defer r.lock.Unlock() + + delete(r.data, user) + return nil +} + +func (r *User) FindByVerification(ctx context.Context, code string) (*user.User, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if code == "" { + return nil, rerror.ErrInvalidParams + } + + for _, u := range r.data { + if u.Verification() != nil && u.Verification().Code() == code { + return u, nil + } + } + + return nil, rerror.ErrNotFound +} diff --git a/server/internal/infrastructure/mongo/asset.go b/server/internal/infrastructure/mongo/asset.go new file mode 100644 index 000000000..46a78c4e3 --- /dev/null +++ b/server/internal/infrastructure/mongo/asset.go @@ -0,0 +1,155 @@ +package mongo + +import ( + "context" + "fmt" + "regexp" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/asset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" + "go.mongodb.org/mongo-driver/bson" + 
"go.mongodb.org/mongo-driver/bson/primitive" +) + +type assetRepo struct { + client *mongodoc.ClientCollection + f repo.TeamFilter +} + +func NewAsset(client *mongodoc.Client) repo.Asset { + r := &assetRepo{client: client.WithCollection("asset")} + r.init() + return r +} + +func (r *assetRepo) Filtered(f repo.TeamFilter) repo.Asset { + return &assetRepo{ + client: r.client, + f: r.f.Merge(f), + } +} + +func (r *assetRepo) FindByID(ctx context.Context, id id.AssetID) (*asset.Asset, error) { + return r.findOne(ctx, bson.M{ + "id": id.String(), + }) +} + +func (r *assetRepo) FindByIDs(ctx context.Context, ids id.AssetIDList) ([]*asset.Asset, error) { + if len(ids) == 0 { + return nil, nil + } + + filter := bson.M{ + "id": bson.M{"$in": ids.Strings()}, + } + dst := make([]*asset.Asset, 0, len(ids)) + res, err := r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterAssets(ids, res), nil +} + +func (r *assetRepo) FindByTeam(ctx context.Context, id id.TeamID, uFilter repo.AssetFilter) ([]*asset.Asset, *usecase.PageInfo, error) { + if !r.f.CanRead(id) { + return nil, usecase.EmptyPageInfo(), nil + } + + var filter interface{} = bson.M{ + "team": id.String(), + } + + if uFilter.Keyword != nil { + filter = mongodoc.And(filter, "name", bson.M{ + "$regex": primitive.Regex{Pattern: fmt.Sprintf(".*%s.*", regexp.QuoteMeta(*uFilter.Keyword)), Options: "i"}, + }) + } + + return r.paginate(ctx, filter, uFilter.Sort, uFilter.Pagination) +} + +func (r *assetRepo) Save(ctx context.Context, asset *asset.Asset) error { + if !r.f.CanWrite(asset.Team()) { + return repo.ErrOperationDenied + } + doc, id := mongodoc.NewAsset(asset) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *assetRepo) Remove(ctx context.Context, id id.AssetID) error { + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{ + "id": id.String(), + })) +} + +func (r *assetRepo) init() { + i := r.client.CreateIndex(context.Background(), []string{"team"}) + if len(i) > 0 { + log.Infof("mongo: 
%s: index created: %s", "asset", i) + } +} + +func (r *assetRepo) paginate(ctx context.Context, filter interface{}, sort *asset.SortType, pagination *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) { + var sortstr *string + if sort != nil { + sortstr2 := string(*sort) + sortstr = &sortstr2 + } + + var c mongodoc.AssetConsumer + pageInfo, err := r.client.Paginate(ctx, r.readFilter(filter), sortstr, pagination, &c) + if err != nil { + return nil, nil, rerror.ErrInternalBy(err) + } + return c.Rows, pageInfo, nil +} + +func (r *assetRepo) find(ctx context.Context, dst []*asset.Asset, filter interface{}) ([]*asset.Asset, error) { + c := mongodoc.AssetConsumer{ + Rows: dst, + } + if err2 := r.client.Find(ctx, r.readFilter(filter), &c); err2 != nil { + return nil, rerror.ErrInternalBy(err2) + } + return c.Rows, nil +} + +func (r *assetRepo) findOne(ctx context.Context, filter interface{}) (*asset.Asset, error) { + dst := make([]*asset.Asset, 0, 1) + c := mongodoc.AssetConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +func filterAssets(ids []id.AssetID, rows []*asset.Asset) []*asset.Asset { + res := make([]*asset.Asset, 0, len(ids)) + for _, id := range ids { + var r2 *asset.Asset + for _, r := range rows { + if r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func (r *assetRepo) readFilter(filter interface{}) interface{} { + return applyTeamFilter(filter, r.f.Readable) +} + +func (r *assetRepo) writeFilter(filter interface{}) interface{} { + return applyTeamFilter(filter, r.f.Writable) +} diff --git a/server/internal/infrastructure/mongo/asset_test.go b/server/internal/infrastructure/mongo/asset_test.go new file mode 100644 index 000000000..8a41d87f3 --- /dev/null +++ b/server/internal/infrastructure/mongo/asset_test.go @@ -0,0 +1,65 @@ +package mongo + +import ( + "context" + "testing" + "time" + + 
"github.com/reearth/reearth-backend/pkg/asset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestFindByID(t *testing.T) { + tests := []struct { + Name string + Expected struct { + Name string + Asset *asset.Asset + } + }{ + { + Expected: struct { + Name string + Asset *asset.Asset + }{ + Asset: asset.New(). + NewID(). + CreatedAt(time.Now()). + Team(id.NewTeamID()). + Name("name"). + Size(10). + URL("hxxps://https://reearth.io/"). + ContentType("json"). + MustBuild(), + }, + }, + } + + init := connect(t) + + for _, tc := range tests { + tc := tc + + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + client := init(t) + + repo := NewAsset(client) + ctx := context.Background() + err := repo.Save(ctx, tc.Expected.Asset) + assert.NoError(t, err) + + got, err := repo.FindByID(ctx, tc.Expected.Asset.ID()) + assert.NoError(t, err) + assert.Equal(t, tc.Expected.Asset.ID(), got.ID()) + assert.Equal(t, tc.Expected.Asset.CreatedAt(), got.CreatedAt()) + assert.Equal(t, tc.Expected.Asset.Team(), got.Team()) + assert.Equal(t, tc.Expected.Asset.URL(), got.URL()) + assert.Equal(t, tc.Expected.Asset.Size(), got.Size()) + assert.Equal(t, tc.Expected.Asset.Name(), got.Name()) + assert.Equal(t, tc.Expected.Asset.ContentType(), got.ContentType()) + }) + } +} diff --git a/server/internal/infrastructure/mongo/auth_request.go b/server/internal/infrastructure/mongo/auth_request.go new file mode 100644 index 000000000..b0d9bf503 --- /dev/null +++ b/server/internal/infrastructure/mongo/auth_request.go @@ -0,0 +1,61 @@ +package mongo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/auth" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "go.mongodb.org/mongo-driver/bson" +) + +type authRequestRepo struct { + client 
*mongodoc.ClientCollection +} + +func NewAuthRequest(client *mongodoc.Client) repo.AuthRequest { + r := &authRequestRepo{client: client.WithCollection("authRequest")} + r.init() + return r +} + +func (r *authRequestRepo) init() { + i := r.client.CreateIndex(context.Background(), []string{"code", "subject"}) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "authRequest", i) + } +} + +func (r *authRequestRepo) FindByID(ctx context.Context, id2 id.AuthRequestID) (*auth.Request, error) { + return r.findOne(ctx, bson.M{"id": id2.String()}) +} + +func (r *authRequestRepo) FindByCode(ctx context.Context, s string) (*auth.Request, error) { + return r.findOne(ctx, bson.M{"code": s}) +} + +func (r *authRequestRepo) FindBySubject(ctx context.Context, s string) (*auth.Request, error) { + return r.findOne(ctx, bson.M{"subject": s}) +} + +func (r *authRequestRepo) Save(ctx context.Context, request *auth.Request) error { + doc, id1 := mongodoc.NewAuthRequest(request) + return r.client.SaveOne(ctx, id1, doc) +} + +func (r *authRequestRepo) Remove(ctx context.Context, requestID id.AuthRequestID) error { + return r.client.RemoveOne(ctx, bson.M{"id": requestID.String()}) +} + +func (r *authRequestRepo) findOne(ctx context.Context, filter interface{}) (*auth.Request, error) { + dst := make([]*auth.Request, 0, 1) + c := mongodoc.AuthRequestConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} diff --git a/server/internal/infrastructure/mongo/auth_request_test.go b/server/internal/infrastructure/mongo/auth_request_test.go new file mode 100644 index 000000000..6d6065e26 --- /dev/null +++ b/server/internal/infrastructure/mongo/auth_request_test.go @@ -0,0 +1,72 @@ +package mongo + +import ( + "context" + "testing" + + "github.com/caos/oidc/pkg/oidc" + "github.com/reearth/reearth-backend/pkg/auth" + "github.com/stretchr/testify/assert" +) + +func TestAuthRequestRepo(t *testing.T) { + tests := 
[]struct { + Name string + Expected struct { + Name string + AuthRequest *auth.Request + } + }{ + { + Expected: struct { + Name string + AuthRequest *auth.Request + }{ + AuthRequest: auth.NewRequest(). + NewID(). + ClientID("client id"). + State("state"). + ResponseType("response type"). + Scopes([]string{"scope"}). + Audiences([]string{"audience"}). + RedirectURI("redirect uri"). + Nonce("nonce"). + CodeChallenge(&oidc.CodeChallenge{ + Challenge: "challenge", + Method: "S256", + }). + AuthorizedAt(nil). + MustBuild(), + }, + }, + } + + init := connect(t) + + for _, tt := range tests { + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + + client := init(t) + + repo := NewAuthRequest(client) + + ctx := context.Background() + err := repo.Save(ctx, tt.Expected.AuthRequest) + assert.NoError(t, err) + + got, err := repo.FindByID(ctx, tt.Expected.AuthRequest.ID()) + assert.NoError(t, err) + assert.Equal(t, tt.Expected.AuthRequest.ID(), got.ID()) + assert.Equal(t, tt.Expected.AuthRequest.GetClientID(), got.GetClientID()) + assert.Equal(t, tt.Expected.AuthRequest.GetState(), got.GetState()) + assert.Equal(t, tt.Expected.AuthRequest.GetResponseType(), got.GetResponseType()) + assert.Equal(t, tt.Expected.AuthRequest.GetScopes(), got.GetScopes()) + assert.Equal(t, tt.Expected.AuthRequest.GetAudience(), got.GetAudience()) + assert.Equal(t, tt.Expected.AuthRequest.GetRedirectURI(), got.GetRedirectURI()) + assert.Equal(t, tt.Expected.AuthRequest.GetNonce(), got.GetNonce()) + assert.Equal(t, tt.Expected.AuthRequest.GetCodeChallenge(), got.GetCodeChallenge()) + assert.Equal(t, tt.Expected.AuthRequest.AuthorizedAt(), got.AuthorizedAt()) + }) + } +} diff --git a/server/internal/infrastructure/mongo/config.go b/server/internal/infrastructure/mongo/config.go new file mode 100644 index 000000000..a78b57a1a --- /dev/null +++ b/server/internal/infrastructure/mongo/config.go @@ -0,0 +1,69 @@ +package mongo + +import ( + "context" + "errors" + + 
"github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/config" + "github.com/reearth/reearth-backend/pkg/rerror" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +const configLockName = "config" + +type configRepo struct { + client *mongodoc.ClientCollection + lock repo.Lock +} + +func NewConfig(client *mongodoc.Client, lock repo.Lock) repo.Config { + return &configRepo{client: client.WithCollection("config"), lock: lock} +} + +func (r *configRepo) LockAndLoad(ctx context.Context) (cfg *config.Config, err error) { + if err := r.lock.Lock(ctx, configLockName); err != nil { + return nil, err + } + + cfgd := &mongodoc.ConfigDocument{} + if err := r.client.Collection().FindOne(ctx, bson.M{}).Decode(cfgd); err != nil { + if !errors.Is(err, mongo.ErrNilDocument) && !errors.Is(err, mongo.ErrNoDocuments) { + return nil, rerror.ErrInternalBy(err) + } + } + return cfgd.Model(), nil +} + +func (r *configRepo) Save(ctx context.Context, cfg *config.Config) error { + if cfg != nil { + if _, err := r.client.Collection().UpdateOne( + ctx, + bson.M{}, + bson.M{"$set": mongodoc.NewConfig(*cfg)}, + (&options.UpdateOptions{}).SetUpsert(true), + ); err != nil { + return rerror.ErrInternalBy(err) + } + } + + return nil +} + +func (r *configRepo) SaveAndUnlock(ctx context.Context, cfg *config.Config) error { + if err := r.Save(ctx, cfg); err != nil { + return err + } + return r.Unlock(ctx) +} + +func (r *configRepo) Unlock(ctx context.Context) error { + if err := r.lock.Unlock(ctx, configLockName); err != nil && !errors.Is(err, repo.ErrNotLocked) { + return err + } + + return nil +} diff --git a/server/internal/infrastructure/mongo/container.go b/server/internal/infrastructure/mongo/container.go new file mode 100644 index 000000000..a890f3343 --- /dev/null +++ 
b/server/internal/infrastructure/mongo/container.go @@ -0,0 +1,76 @@ +package mongo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/migration" + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/user" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +func InitRepos(ctx context.Context, c *repo.Container, mc *mongo.Client, databaseName string) error { + if databaseName == "" { + databaseName = "reearth" + } + + lock, err := NewLock(mc.Database(databaseName).Collection("locks")) + if err != nil { + return err + } + + client := mongodoc.NewClient(databaseName, mc) + c.Asset = NewAsset(client) + c.AuthRequest = NewAuthRequest(client) + c.Config = NewConfig(client, lock) + c.DatasetSchema = NewDatasetSchema(client) + c.Dataset = NewDataset(client) + c.Layer = NewLayer(client) + c.Plugin = NewPlugin(client) + c.Project = NewProject(client) + c.PropertySchema = NewPropertySchema(client) + c.Property = NewProperty(client) + c.Scene = NewScene(client) + c.Tag = NewTag(client) + c.Team = NewTeam(client) + c.User = NewUser(client) + c.SceneLock = NewSceneLock(client) + c.Transaction = NewTransaction(client) + c.Lock = lock + + // migration + m := migration.Client{Client: client, Config: c.Config} + if err := m.Migrate(ctx); err != nil { + return err + } + + return nil +} + +func applyTeamFilter(filter interface{}, ids user.TeamIDList) interface{} { + if ids == nil { + return filter + } + return mongodoc.And(filter, "team", bson.M{"$in": ids.Strings()}) +} + +func applySceneFilter(filter interface{}, ids scene.IDList) interface{} { + if ids == nil { + return filter + } + return mongodoc.And(filter, "scene", bson.M{"$in": ids.Strings()}) +} + +func applyOptionalSceneFilter(filter interface{}, ids scene.IDList) interface{} { + 
if ids == nil { + return filter + } + return mongodoc.And(filter, "", bson.M{"$or": []bson.M{ + {"scene": bson.M{"$in": ids.Strings()}}, + {"scene": nil}, + {"scene": ""}, + }}) +} diff --git a/server/internal/infrastructure/mongo/dataset.go b/server/internal/infrastructure/mongo/dataset.go new file mode 100644 index 000000000..1344fdcb7 --- /dev/null +++ b/server/internal/infrastructure/mongo/dataset.go @@ -0,0 +1,365 @@ +package mongo + +import ( + "context" + "errors" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" + + "github.com/reearth/reearth-backend/internal/usecase/repo" +) + +type datasetRepo struct { + client *mongodoc.ClientCollection + f repo.SceneFilter +} + +func NewDataset(client *mongodoc.Client) repo.Dataset { + r := &datasetRepo{client: client.WithCollection("dataset")} + r.init() + return r +} + +func (r *datasetRepo) Filtered(f repo.SceneFilter) repo.Dataset { + return &datasetRepo{ + client: r.client, + f: r.f.Merge(f), + } +} + +func (r *datasetRepo) init() { + i := r.client.CreateIndex(context.Background(), []string{"scene", "schema"}) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "dataset", i) + } +} + +func (r *datasetRepo) FindByID(ctx context.Context, id id.DatasetID) (*dataset.Dataset, error) { + return r.findOne(ctx, bson.M{"id": id.String()}) +} + +func (r *datasetRepo) FindByIDs(ctx context.Context, ids id.DatasetIDList) (dataset.List, error) { + if len(ids) == 0 { + return nil, nil + } + + filter := bson.M{ + "id": bson.M{ + "$in": ids.Strings(), + }, + } + dst := make([]*dataset.Dataset, 0, len(ids)) + res, err := r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + return 
filterDatasets(ids, res), nil +} + +func (r *datasetRepo) FindBySchema(ctx context.Context, schemaID id.DatasetSchemaID, pagination *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) { + return r.paginate(ctx, bson.M{ + "schema": schemaID.String(), + }, pagination) +} + +func (r *datasetRepo) CountBySchema(ctx context.Context, id id.DatasetSchemaID) (int, error) { + res, err := r.client.Count(ctx, r.readFilter(bson.M{ + "schema": id.String(), + })) + if err != nil { + return 0, err + } + return int(res), nil +} + +func (r *datasetRepo) FindBySchemaAll(ctx context.Context, schemaID id.DatasetSchemaID) (dataset.List, error) { + return r.find(ctx, nil, bson.M{ + "schema": schemaID.String(), + }) +} + +func (r *datasetRepo) FindGraph(ctx context.Context, did id.DatasetID, fields id.DatasetFieldIDList) (dataset.List, error) { + if len(fields) == 0 { + d, err := r.FindByID(ctx, did) + if err != nil { + return nil, err + } + return dataset.List{d}, nil + } + + fieldsstr := fields.Strings() + firstField := fieldsstr[0] + + aggfilter := bson.D{} + if r.f.Readable != nil { + aggfilter = append(aggfilter, bson.E{Key: "$in", Value: []interface{}{ + "$$g.scene", + r.f.Readable.Strings(), + }}) + } + + pipeline := bson.D{ + {Key: "$match", Value: r.readFilter(bson.M{ + "id": did.String(), + "fields.id": firstField, + })}, + {Key: "$limit", Value: 1}, + {Key: "$addFields", Value: bson.D{ + {Key: "field", Value: bson.D{ + {Key: "$arrayElemAt", Value: []interface{}{ + bson.D{{Key: "$filter", Value: bson.D{ + {Key: "input", Value: "$fields"}, + {Key: "as", Value: "f"}, + {Key: "cond", Value: bson.D{ + {Key: "$and", Value: []bson.D{ + {{Key: "$eq", Value: []string{"$$f.id", firstField}}}, + {{Key: "$eq", Value: []string{"$$f.type", "ref"}}}, + }}, + }}, + }}}, + 0, + }}, + }}, + }}, + {Key: "$graphLookup", Value: bson.D{ + {Key: "from", Value: "dataset"}, + {Key: "startWith", Value: "$field.value"}, + {Key: "connectFromField", Value: "fields.value"}, + {Key: 
"connectToField", Value: "id"}, + {Key: "depthField", Value: "depth"}, + {Key: "as", Value: "graph"}, + {Key: "restrictSearchWithMatch", Value: r.readFilter(bson.M{})}, + }}, + {Key: "$addFields", Value: bson.D{ + {Key: "firstGraph", Value: bson.D{ + {Key: "$slice", Value: []interface{}{ + bson.D{{Key: "$filter", Value: bson.D{ + {Key: "input", Value: "$graph"}, + {Key: "as", Value: "g"}, + {Key: "cond", Value: bson.D{ + {Key: "$eq", Value: []interface{}{"$$g.depth", 0}}, + }}, + }}}, + 0, + 1, + }}, + }}, + {Key: "graph", Value: bson.D{ + {Key: "$filter", Value: bson.D{ + {Key: "input", Value: bson.D{ + {Key: "$map", Value: bson.D{ + {Key: "input", Value: bson.D{ + {Key: "$map", Value: bson.D{ + {Key: "input", Value: "$graph"}, + {Key: "as", Value: "g"}, + {Key: "in", Value: bson.D{ + {Key: "$arrayElemAt", Value: []interface{}{ + bson.D{{Key: "$filter", Value: bson.D{ + {Key: "input", Value: "$$g.fields"}, + {Key: "as", Value: "f"}, + {Key: "cond", Value: bson.D{ + {Key: "$and", Value: bson.D{ + {Key: "$eq", Value: []interface{}{ + "$$f.id", + bson.D{ + {Key: "$arrayElemAt", Value: []interface{}{ + fieldsstr[1:], + "$$g.depth", + }}, + }, + }}, + }}, + {Key: "$eq", Value: []string{"$$f.type", "ref"}}, + }}, + }}}, + 0, + }}, + }}, + }}, + }}, + {Key: "as", Value: "g"}, + {Key: "in", Value: bson.D{ + {Key: "$arrayElemAt", Value: []interface{}{ + bson.D{{Key: "$filter", Value: bson.D{ + {Key: "input", Value: "$graph"}, + {Key: "as", Value: "g1"}, + {Key: "cond", Value: bson.D{ + {Key: "$eq", Value: []string{ + "$$g1.id", + "$$g.value", + }}, + }}, + }}}, + 0, + }}, + }}, + }}, + }}, + {Key: "as", Value: "f"}, + {Key: "cond", Value: bson.D{ + {Key: "$ne", Value: []interface{}{"$$f", nil}}, + }}, + }}, + }}, + }}, + {Key: "$sort", Value: bson.D{ + {Key: "graph.depth", Value: 1}, + }}, + {Key: "$addFields", Value: bson.D{ + {Key: "graph", Value: bson.D{ + {Key: "$filter", Value: bson.D{ + {Key: "input", Value: bson.D{ + {Key: "$concatArrays", Value:
[]string{"$firstGraph", "$graph"}}, + }}, + {Key: "as", Value: "g"}, + {Key: "cond", Value: aggfilter}, + }}, + }}, + }}, + {Key: "$project", Value: bson.D{ + {Key: "firstGraph", Value: 0}, + {Key: "field", Value: 0}, + }}, + } + + cursor, err2 := r.client.Collection().Aggregate(ctx, pipeline) + if err2 != nil { + return nil, rerror.ErrInternalBy(err2) + } + defer func() { + _ = cursor.Close(ctx) + }() + + doc := mongodoc.DatasetExtendedDocument{} + if err2 := bson.Unmarshal(cursor.Current, &doc); err2 != nil { + return nil, rerror.ErrInternalBy(err2) + } + docs := make([]*mongodoc.DatasetExtendedDocument, 0, len(fields)) + for i := 0; i < len(fields); i++ { + var d2 *mongodoc.DatasetExtendedDocument + if i == 0 { + d2 = &doc + } else { + for _, d := range doc.Graph { + if i-1 == d.Depth { + d2 = d + } + } + } + docs = append(docs, d2) + } + res := make(dataset.List, 0, len(docs)) + for i, d := range docs { + if i > 0 && i-1 != d.Depth { + return nil, rerror.ErrInternalBy(errors.New("invalid order")) + } + ds, err2 := d.DatasetDocument.Model() + if err2 != nil { + return nil, rerror.ErrInternalBy(err2) + } + res = append(res, ds) + } + return res, nil +} + +func (r *datasetRepo) Save(ctx context.Context, dataset *dataset.Dataset) error { + if !r.f.CanWrite(dataset.Scene()) { + return repo.ErrOperationDenied + } + doc, id := mongodoc.NewDataset(dataset) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *datasetRepo) SaveAll(ctx context.Context, datasetList dataset.List) error { + if datasetList == nil || len(datasetList) == 0 { + return nil + } + docs, ids := mongodoc.NewDatasets(datasetList, r.f.Writable) + return r.client.SaveAll(ctx, ids, docs) +} + +func (r *datasetRepo) Remove(ctx context.Context, id id.DatasetID) error { + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) +} + +func (r *datasetRepo) RemoveAll(ctx context.Context, ids id.DatasetIDList) error { + if len(ids) == 0 { + return nil + } + return r.client.RemoveAll(ctx, 
r.writeFilter(bson.M{ + "id": bson.M{"$in": ids.Strings()}, + })) +} + +func (r *datasetRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + if !r.f.CanWrite(sceneID) { + return nil + } + _, err := r.client.Collection().DeleteMany(ctx, bson.D{ + {Key: "scene", Value: sceneID.String()}, + }) + if err != nil { + return rerror.ErrInternalBy(err) + } + return nil +} + +func (r *datasetRepo) find(ctx context.Context, dst dataset.List, filter interface{}) (dataset.List, error) { + c := mongodoc.DatasetConsumer{ + Rows: dst, + } + if err2 := r.client.Find(ctx, r.readFilter(filter), &c); err2 != nil { + return nil, rerror.ErrInternalBy(err2) + } + return c.Rows, nil +} + +func (r *datasetRepo) findOne(ctx context.Context, filter interface{}) (*dataset.Dataset, error) { + dst := make([]*dataset.Dataset, 0, 1) + c := mongodoc.DatasetConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +func (r *datasetRepo) paginate(ctx context.Context, filter bson.M, pagination *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) { + var c mongodoc.DatasetConsumer + pageInfo, err := r.client.Paginate(ctx, r.readFilter(filter), nil, pagination, &c) + if err != nil { + return nil, nil, rerror.ErrInternalBy(err) + } + return c.Rows, pageInfo, nil +} + +func filterDatasets(ids []id.DatasetID, rows []*dataset.Dataset) []*dataset.Dataset { + res := make([]*dataset.Dataset, 0, len(ids)) + for _, id := range ids { + var r2 *dataset.Dataset + for _, r := range rows { + if r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func (r *datasetRepo) readFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Readable) +} + +func (r *datasetRepo) writeFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Writable) +} diff --git a/server/internal/infrastructure/mongo/dataset_schema.go 
b/server/internal/infrastructure/mongo/dataset_schema.go new file mode 100644 index 000000000..a16c00b65 --- /dev/null +++ b/server/internal/infrastructure/mongo/dataset_schema.go @@ -0,0 +1,203 @@ +package mongo + +import ( + "context" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type datasetSchemaRepo struct { + client *mongodoc.ClientCollection + f repo.SceneFilter +} + +func NewDatasetSchema(client *mongodoc.Client) repo.DatasetSchema { + r := &datasetSchemaRepo{client: client.WithCollection("datasetSchema")} + r.init() + return r +} + +func (r *datasetSchemaRepo) init() { + i := r.client.CreateIndex(context.Background(), []string{"scene"}) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "datasetSchema", i) + } +} + +func (r *datasetSchemaRepo) Filtered(f repo.SceneFilter) repo.DatasetSchema { + return &datasetSchemaRepo{ + client: r.client, + f: r.f.Merge(f), + } +} + +func (r *datasetSchemaRepo) FindByID(ctx context.Context, id id.DatasetSchemaID) (*dataset.Schema, error) { + return r.findOne(ctx, bson.M{ + "id": id.String(), + }) +} + +func (r *datasetSchemaRepo) FindByIDs(ctx context.Context, ids id.DatasetSchemaIDList) (dataset.SchemaList, error) { + if len(ids) == 0 { + return nil, nil + } + + filter := bson.M{ + "id": bson.M{ + "$in": ids.Strings(), + }, + } + dst := make([]*dataset.Schema, 0, len(ids)) + res, err := r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterDatasetSchemas(ids, res), nil +} + +func (r *datasetSchemaRepo) FindByScene(ctx context.Context, sceneID id.SceneID, pagination *usecase.Pagination) 
(dataset.SchemaList, *usecase.PageInfo, error) { + if !r.f.CanRead(sceneID) { + return nil, usecase.EmptyPageInfo(), nil + } + return r.paginate(ctx, bson.M{ + "scene": sceneID.String(), + }, pagination) +} + +func (r *datasetSchemaRepo) FindBySceneAll(ctx context.Context, sceneID id.SceneID) (dataset.SchemaList, error) { + if !r.f.CanRead(sceneID) { + return nil, nil + } + return r.find(ctx, nil, bson.M{ + "scene": sceneID.String(), + }) +} + +func (r *datasetSchemaRepo) FindDynamicByID(ctx context.Context, sid id.DatasetSchemaID) (*dataset.Schema, error) { + return r.findOne(ctx, bson.M{ + "id": sid.String(), + "dynamic": true, + }) +} + +func (r *datasetSchemaRepo) FindAllDynamicByScene(ctx context.Context, sceneID id.SceneID) (dataset.SchemaList, error) { + if !r.f.CanRead(sceneID) { + return nil, nil + } + return r.find(ctx, nil, bson.M{ + "scene": sceneID.String(), + "dynamic": true, + }) +} + +func (r *datasetSchemaRepo) FindBySceneAndSource(ctx context.Context, sceneID id.SceneID, source string) (dataset.SchemaList, error) { + if !r.f.CanRead(sceneID) { + return nil, nil + } + return r.find(ctx, nil, bson.M{ + "scene": sceneID.String(), + "source": string(source), + }) +} + +func (r *datasetSchemaRepo) Save(ctx context.Context, datasetSchema *dataset.Schema) error { + if !r.f.CanWrite(datasetSchema.Scene()) { + return repo.ErrOperationDenied + } + doc, id := mongodoc.NewDatasetSchema(datasetSchema) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *datasetSchemaRepo) SaveAll(ctx context.Context, datasetSchemas dataset.SchemaList) error { + if datasetSchemas == nil || len(datasetSchemas) == 0 { + return nil + } + docs, ids := mongodoc.NewDatasetSchemas(datasetSchemas, r.f.Writable) + return r.client.SaveAll(ctx, ids, docs) +} + +func (r *datasetSchemaRepo) Remove(ctx context.Context, id id.DatasetSchemaID) error { + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) +} + +func (r *datasetSchemaRepo) RemoveAll(ctx context.Context, 
ids id.DatasetSchemaIDList) error { + if len(ids) == 0 { + return nil + } + return r.client.RemoveAll(ctx, r.writeFilter(bson.M{ + "id": bson.M{"$in": ids.Strings()}, + })) +} + +func (r *datasetSchemaRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + if !r.f.CanWrite(sceneID) { + return nil + } + if _, err := r.client.Collection().DeleteMany(ctx, bson.M{ + "scene": sceneID.String(), + }); err != nil { + return rerror.ErrInternalBy(err) + } + return nil +} + +func (r *datasetSchemaRepo) find(ctx context.Context, dst []*dataset.Schema, filter interface{}) ([]*dataset.Schema, error) { + c := mongodoc.DatasetSchemaConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.Rows, nil +} + +func (r *datasetSchemaRepo) findOne(ctx context.Context, filter interface{}) (*dataset.Schema, error) { + dst := make([]*dataset.Schema, 0, 1) + c := mongodoc.DatasetSchemaConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +func (r *datasetSchemaRepo) paginate(ctx context.Context, filter bson.M, pagination *usecase.Pagination) ([]*dataset.Schema, *usecase.PageInfo, error) { + var c mongodoc.DatasetSchemaConsumer + pageInfo, err := r.client.Paginate(ctx, r.readFilter(filter), nil, pagination, &c) + if err != nil { + return nil, nil, rerror.ErrInternalBy(err) + } + return c.Rows, pageInfo, nil +} + +func filterDatasetSchemas(ids []id.DatasetSchemaID, rows []*dataset.Schema) []*dataset.Schema { + res := make([]*dataset.Schema, 0, len(ids)) + for _, id := range ids { + var r2 *dataset.Schema + for _, r := range rows { + if r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func (r *datasetSchemaRepo) readFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Readable) +} + +func (r *datasetSchemaRepo) writeFilter(filter interface{}) 
interface{} { + return applySceneFilter(filter, r.f.Writable) +} diff --git a/server/internal/infrastructure/mongo/layer.go b/server/internal/infrastructure/mongo/layer.go new file mode 100644 index 000000000..6db761329 --- /dev/null +++ b/server/internal/infrastructure/mongo/layer.go @@ -0,0 +1,363 @@ +package mongo + +import ( + "context" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type layerRepo struct { + client *mongodoc.ClientCollection + f repo.SceneFilter +} + +func NewLayer(client *mongodoc.Client) repo.Layer { + r := &layerRepo{client: client.WithCollection("layer")} + r.init() + return r +} + +func (r *layerRepo) init() { + i := r.client.CreateIndex(context.Background(), []string{"plugin", "extension", "scene", "group.layers", "tags.id", "tags.tags.id"}) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "layer", i) + } +} + +func (r *layerRepo) Filtered(f repo.SceneFilter) repo.Layer { + return &layerRepo{ + client: r.client, + f: r.f.Merge(f), + } +} + +func (r *layerRepo) FindByID(ctx context.Context, id id.LayerID) (layer.Layer, error) { + return r.findOne(ctx, bson.M{ + "id": id.String(), + }) +} + +func (r *layerRepo) FindByIDs(ctx context.Context, ids id.LayerIDList) (layer.List, error) { + if len(ids) == 0 { + return nil, nil + } + + filter := bson.M{ + "id": bson.M{ + "$in": ids.Strings(), + }, + } + dst := make([]*layer.Layer, 0, len(ids)) + res, err := r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterLayers(ids, res), nil +} + +func (r *layerRepo) FindAllByDatasetSchema(ctx context.Context, dsid id.DatasetSchemaID) (layer.List, error) { + return r.find(ctx, nil, bson.M{ + 
"group.linkeddatasetschema": dsid.String(), + }) +} + +func (r *layerRepo) FindItemByID(ctx context.Context, id id.LayerID) (*layer.Item, error) { + return r.findItemOne(ctx, bson.M{ + "id": id.String(), + }) +} + +func (r *layerRepo) FindItemByIDs(ctx context.Context, ids id.LayerIDList) (layer.ItemList, error) { + filter := bson.M{ + "id": bson.M{ + "$in": ids.Strings(), + }, + } + dst := make([]*layer.Item, 0, len(ids)) + res, err := r.findItems(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterLayerItems(ids, res), nil +} + +func (r *layerRepo) FindGroupByID(ctx context.Context, id id.LayerID) (*layer.Group, error) { + return r.findGroupOne(ctx, bson.M{ + "id": id.String(), + }) +} + +func (r *layerRepo) FindGroupByIDs(ctx context.Context, ids id.LayerIDList) (layer.GroupList, error) { + filter := bson.M{ + "id": bson.M{ + "$in": ids.Strings(), + }, + } + dst := make([]*layer.Group, 0, len(ids)) + res, err := r.findGroups(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterLayerGroups(ids, res), nil +} + +func (r *layerRepo) FindGroupBySceneAndLinkedDatasetSchema(ctx context.Context, sceneID id.SceneID, datasetSchemaID id.DatasetSchemaID) (layer.GroupList, error) { + return r.findGroups(ctx, nil, bson.M{ + "scene": sceneID.String(), + "group.linkeddatasetschema": datasetSchemaID.String(), + }) +} + +func (r *layerRepo) FindParentsByIDs(ctx context.Context, ids id.LayerIDList) (layer.GroupList, error) { + return r.findGroups(ctx, nil, bson.M{ + "group.layers": bson.M{"$in": ids.Strings()}, + }) +} + +func (r *layerRepo) FindByPluginAndExtension(ctx context.Context, pid id.PluginID, eid *id.PluginExtensionID) (layer.List, error) { + filter := bson.M{ + "plugin": pid.String(), + } + if eid != nil { + filter["extension"] = eid.String() + } + return r.find(ctx, nil, filter) +} + +func (r *layerRepo) FindByPluginAndExtensionOfBlocks(ctx context.Context, pid id.PluginID, eid *id.PluginExtensionID) (layer.List, error) { + 
filter := bson.M{ + "infobox.fields.plugin": pid.String(), + } + if eid != nil { + filter["infobox.fields.extension"] = eid.String() + } + return r.find(ctx, nil, filter) +} + +func (r *layerRepo) FindByProperty(ctx context.Context, id id.PropertyID) (layer.Layer, error) { + return r.findOne(ctx, bson.M{ + "$or": []bson.M{ + {"property": id.String()}, + {"infobox.property": id.String()}, + {"infobox.fields.property": id.String()}, + }, + }) +} + +func (r *layerRepo) FindParentByID(ctx context.Context, id id.LayerID) (*layer.Group, error) { + return r.findGroupOne(ctx, bson.M{ + "group.layers": id.String(), + }) +} + +func (r *layerRepo) FindByScene(ctx context.Context, id id.SceneID) (layer.List, error) { + if !r.f.CanRead(id) { + return nil, nil + } + return r.find(ctx, nil, bson.M{ + "scene": id.String(), + }) +} + +func (r *layerRepo) FindByTag(ctx context.Context, tagID id.TagID) (layer.List, error) { + return r.find(ctx, nil, bson.M{ + "$or": []bson.M{ + {"tags.id": tagID.String()}, + {"tags.tags.id": tagID.String()}, + }, + }) +} + +func (r *layerRepo) Save(ctx context.Context, layer layer.Layer) error { + if !r.f.CanWrite(layer.Scene()) { + return repo.ErrOperationDenied + } + doc, id := mongodoc.NewLayer(layer) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *layerRepo) SaveAll(ctx context.Context, layers layer.List) error { + if layers == nil || len(layers) == 0 { + return nil + } + docs, ids := mongodoc.NewLayers(layers, r.f.Writable) + return r.client.SaveAll(ctx, ids, docs) +} + +func (r *layerRepo) UpdatePlugin(ctx context.Context, old, new id.PluginID) error { + return r.client.UpdateManyMany( + ctx, + []mongodoc.Update{ + { + Filter: r.writeFilter(bson.M{"plugin": old.String()}), + Update: bson.M{"plugin": new.String()}, + }, + { + Filter: r.writeFilter(bson.M{"infobox.fields": bson.M{"$type": "array"}}), + Update: bson.M{"infobox.fields.$[if].plugin": new.String()}, + ArrayFilters: []interface{}{ + bson.M{"if.plugin": old.String()}, + }, + }, 
+ }, + ) +} + +func (r *layerRepo) Remove(ctx context.Context, id id.LayerID) error { + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) +} + +func (r *layerRepo) RemoveAll(ctx context.Context, ids id.LayerIDList) error { + if len(ids) == 0 { + return nil + } + return r.client.RemoveAll(ctx, r.writeFilter(bson.M{ + "id": bson.M{"$in": ids.Strings()}, + })) +} + +func (r *layerRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + if !r.f.CanWrite(sceneID) { + return nil + } + filter := bson.D{ + {Key: "scene", Value: sceneID.String()}, + } + _, err := r.client.Collection().DeleteMany(ctx, filter) + if err != nil { + return rerror.ErrInternalBy(err) + } + return nil +} + +func (r *layerRepo) find(ctx context.Context, dst layer.List, filter interface{}) (layer.List, error) { + c := mongodoc.LayerConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.Rows, nil +} + +func (r *layerRepo) findOne(ctx context.Context, filter interface{}) (layer.Layer, error) { + c := mongodoc.LayerConsumer{} + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + if len(c.Rows) == 0 { + return nil, rerror.ErrNotFound + } + return *c.Rows[0], nil +} + +func (r *layerRepo) findItemOne(ctx context.Context, filter interface{}) (*layer.Item, error) { + c := mongodoc.LayerConsumer{} + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + if len(c.ItemRows) == 0 { + return nil, rerror.ErrNotFound + } + return c.ItemRows[0], nil +} + +func (r *layerRepo) findGroupOne(ctx context.Context, filter interface{}) (*layer.Group, error) { + c := mongodoc.LayerConsumer{} + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + if len(c.GroupRows) == 0 { + return nil, rerror.ErrNotFound + } + return c.GroupRows[0], nil +} + +func (r *layerRepo) findItems(ctx 
context.Context, dst layer.ItemList, filter interface{}) (layer.ItemList, error) { + c := mongodoc.LayerConsumer{ + ItemRows: dst, + } + if c.ItemRows != nil { + c.Rows = make(layer.List, 0, len(c.ItemRows)) + } + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.ItemRows, nil +} + +func (r *layerRepo) findGroups(ctx context.Context, dst layer.GroupList, filter interface{}) (layer.GroupList, error) { + c := mongodoc.LayerConsumer{ + GroupRows: dst, + } + if c.GroupRows != nil { + c.Rows = make(layer.List, 0, len(c.GroupRows)) + } + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.GroupRows, nil +} + +func filterLayers(ids []id.LayerID, rows []*layer.Layer) []*layer.Layer { + res := make([]*layer.Layer, 0, len(ids)) + for _, id := range ids { + var r2 *layer.Layer + for _, r := range rows { + if r == nil { + continue + } + if r3 := *r; r3 != nil && r3.ID() == id { + r2 = &r3 + break + } + } + res = append(res, r2) + } + return res +} + +func filterLayerItems(ids []id.LayerID, rows []*layer.Item) []*layer.Item { + res := make([]*layer.Item, 0, len(ids)) + for _, id := range ids { + var r2 *layer.Item + for _, r := range rows { + if r != nil && r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func filterLayerGroups(ids []id.LayerID, rows []*layer.Group) []*layer.Group { + res := make([]*layer.Group, 0, len(ids)) + for _, id := range ids { + var r2 *layer.Group + for _, r := range rows { + if r != nil && r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func (r *layerRepo) readFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Readable) +} + +func (r *layerRepo) writeFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Writable) +} diff --git a/server/internal/infrastructure/mongo/lock.go b/server/internal/infrastructure/mongo/lock.go new 
file mode 100644 index 000000000..b7d9626c4 --- /dev/null +++ b/server/internal/infrastructure/mongo/lock.go @@ -0,0 +1,111 @@ +package mongo + +import ( + "context" + "errors" + "sync" + + "github.com/avast/retry-go/v4" + "github.com/google/uuid" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" + lock "github.com/square/mongo-lock" + "go.mongodb.org/mongo-driver/mongo" +) + +type Lock struct { + l *lock.Client + hostid string + locks sync.Map +} + +func NewLock(c *mongo.Collection) (repo.Lock, error) { + hostid := uuid.NewString() + + l := lock.NewClient(c) + if err := l.CreateIndexes(context.Background()); err != nil { + return nil, err + } + + return &Lock{ + l: l, + hostid: hostid, + }, nil +} + +func (r *Lock) Lock(ctx context.Context, name string) error { + if r.getLockID(name) != "" { + return repo.ErrAlreadyLocked + } + + lockID := uuid.NewString() + log.Infof("lock: trying to lock: name=%s, id=%s, host=%s", name, lockID, r.hostid) + + if err := retry.Do( + func() error { return r.l.XLock(ctx, name, lockID, r.details()) }, + retry.RetryIf(func(err error) bool { return errors.Is(err, lock.ErrAlreadyLocked) }), + ); err != nil { + log.Infof("lock: failed to lock: name=%s, id=%s, host=%s, err=%s", name, lockID, r.hostid, err) + return repo.ErrFailedToLock + } + + r.setLockID(name, lockID) + log.Infof("lock: locked: name=%s, id=%s, host=%s", name, lockID, r.hostid) + return nil +} + +func (r *Lock) Unlock(ctx context.Context, name string) error { + lockID := r.getLockID(name) + if lockID == "" { + return repo.ErrNotLocked + } + + if _, err := r.l.Unlock(ctx, lockID); err != nil { + return rerror.ErrInternalBy(err) + } + + r.deleteLockID(name) + log.Infof("lock: unlocked: name=%s, id=%s, host=%s", name, lockID, r.hostid) + return nil +} + +func (r *Lock) details() lock.LockDetails { + if r == nil { + return lock.LockDetails{} + } + + return
lock.LockDetails{ + Host: r.hostid, + TTL: 60 * 60, // 1 hour + } +} + +func (r *Lock) setLockID(key, lockID string) { + if r == nil { + return + } + + r.locks.Store(key, lockID) +} + +func (r *Lock) getLockID(key string) string { + if r == nil { + return "" + } + + l, ok := r.locks.Load(key) + if !ok { + return "" + } + + return l.(string) +} + +func (r *Lock) deleteLockID(key string) { + if r == nil { + return + } + + r.locks.Delete(key) +} diff --git a/server/internal/infrastructure/mongo/migration/201217132559_add_scene_widget_id.go b/server/internal/infrastructure/mongo/migration/201217132559_add_scene_widget_id.go new file mode 100644 index 000000000..2d516d3c5 --- /dev/null +++ b/server/internal/infrastructure/mongo/migration/201217132559_add_scene_widget_id.go @@ -0,0 +1,46 @@ +package migration + +import ( + "context" + + "github.com/labstack/gommon/log" + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/pkg/id" + "go.mongodb.org/mongo-driver/bson" +) + +func AddSceneWidgetId(ctx context.Context, c DBClient) error { + col := c.WithCollection("scene") + + return col.Find(ctx, bson.D{}, &mongodoc.BatchConsumer{ + Size: 1000, + Callback: func(rows []bson.Raw) error { + + ids := make([]string, 0, len(rows)) + newRows := make([]interface{}, 0, len(rows)) + + log.Infof("migration: AddSceneWidgetId: hit scenes: %d\n", len(rows)) + + for _, row := range rows { + var doc mongodoc.SceneDocument + if err := bson.Unmarshal(row, &doc); err != nil { + return err + } + + widgets := make([]mongodoc.SceneWidgetDocument, 0, len(doc.Widgets)) + for _, w := range doc.Widgets { + if w.ID == "" { + w.ID = id.NewWidgetID().String() + } + widgets = append(widgets, w) + } + doc.Widgets = widgets + + ids = append(ids, doc.ID) + newRows = append(newRows, doc) + } + + return col.SaveAll(ctx, ids, newRows) + }, + }) +} diff --git a/server/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go 
b/server/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go new file mode 100644 index 000000000..38bd4c177 --- /dev/null +++ b/server/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go @@ -0,0 +1,68 @@ +package migration + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/visualizer" + "go.mongodb.org/mongo-driver/bson" +) + +var scenePropertySchema = builtin.MustPropertySchemaByVisualizer(visualizer.VisualizerCesium) + +func AddSceneDefaultTile(ctx context.Context, c DBClient) error { + col := c.WithCollection("property") + + psid := scenePropertySchema.ID().String() + filter := bson.M{ + "$or": bson.A{ + bson.M{"schema": psid, "items": bson.A{}}, + bson.M{"schema": psid, "items": bson.M{"$exists": false}}, + bson.M{ + "schema": psid, + "items": bson.M{ + "$not": bson.M{ + "$elemMatch": bson.M{ + "schemagroup": "tiles", + }, + }, + }, + }, + bson.M{ + "schema": psid, + "items": bson.M{ + "$elemMatch": bson.M{ + "schemagroup": "tiles", + "groups": bson.A{}, + }, + }, + }, + }, + } + + log.Infof("migration: AddSceneDefaultTile: filter: %+v\n", filter) + + return col.Find(ctx, filter, &mongodoc.PropertyBatchConsumer{ + Size: 1000, + Callback: func(properties []*property.Property) error { + log.Infof("migration: AddSceneDefaultTile: hit properties: %d\n", len(properties)) + + for _, p := range properties { + g := p.GetOrCreateGroupList(scenePropertySchema, property.PointItemBySchema(id.PropertySchemaGroupID("tiles"))) + if g == nil || g.Count() > 0 { + continue + } + f := property.NewGroup().NewID().SchemaGroup(id.PropertySchemaGroupID("tiles")).MustBuild() + g.Add(f, -1) + } + + docs, ids := mongodoc.NewProperties(properties, 
nil) + + return col.SaveAll(ctx, ids, docs) + }, + }) +} diff --git a/server/internal/infrastructure/mongo/migration/210310145844_remove_preview_token.go b/server/internal/infrastructure/mongo/migration/210310145844_remove_preview_token.go new file mode 100644 index 000000000..05a4b6630 --- /dev/null +++ b/server/internal/infrastructure/mongo/migration/210310145844_remove_preview_token.go @@ -0,0 +1,43 @@ +package migration + +import ( + "context" + + "github.com/labstack/gommon/log" + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "go.mongodb.org/mongo-driver/bson" +) + +func RemovePreviewToken(ctx context.Context, c DBClient) error { + col := c.WithCollection("project") + + return col.Find(ctx, bson.D{}, &mongodoc.BatchConsumer{ + Size: 1000, + Callback: func(rows []bson.Raw) error { + + ids := make([]string, 0, len(rows)) + newRows := make([]interface{}, 0, len(rows)) + + log.Infof("migration: RemoveProjectPreviewToken: hit projects: %d\n", len(rows)) + + for _, row := range rows { + doc := bson.M{} + if err := bson.Unmarshal(row, &doc); err != nil { + return err + } + + if doc["publishmentstatus"] == "limited" { + pt := doc["previewtoken"] + doc["alias"] = pt + } + delete(doc, "previewtoken") + + id := doc["id"].(string) + ids = append(ids, id) + newRows = append(newRows, doc) + } + + return col.SaveAll(ctx, ids, newRows) + }, + }) +} diff --git a/server/internal/infrastructure/mongo/migration/210730175108_add_scene_align_system.go b/server/internal/infrastructure/mongo/migration/210730175108_add_scene_align_system.go new file mode 100644 index 000000000..ebbfee702 --- /dev/null +++ b/server/internal/infrastructure/mongo/migration/210730175108_add_scene_align_system.go @@ -0,0 +1,74 @@ +package migration + +import ( + "context" + + "github.com/labstack/gommon/log" + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/pkg/builtin" + 
"github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/scene" + "go.mongodb.org/mongo-driver/bson" +) + +func AddSceneAlignSystem(ctx context.Context, c DBClient) error { + col := c.WithCollection("scene") + + return col.Find(ctx, bson.D{}, &mongodoc.BatchConsumer{ + Size: 1000, + Callback: func(rows []bson.Raw) error { + ids := make([]string, 0, len(rows)) + newRows := make([]interface{}, 0, len(rows)) + + log.Infof("migration: AddSceneAlignSystem: hit scenes: %d\n", len(rows)) + + for _, row := range rows { + var doc mongodoc.SceneDocument + if err := bson.Unmarshal(row, &doc); err != nil { + return err + } + + swas := scene.NewWidgetAlignSystem() + + for _, w := range doc.Widgets { + wid, err := id.WidgetIDFrom(w.ID) + if err != nil { + continue + } + + pid, err := id.PluginIDFrom(w.Plugin) + if err != nil { + continue + } + + l := builtin.GetPlugin(pid).Extension(id.PluginExtensionID(w.Extension)).WidgetLayout() + if l == nil || l.Floating() { + continue + } + + dl := l.DefaultLocation() + if dl == nil { + dl = &plugin.WidgetLocation{ + Zone: plugin.WidgetZoneInner, + Section: plugin.WidgetSectionLeft, + Area: plugin.WidgetAreaTop, + } + } + + swas.Area(scene.WidgetLocation{ + Zone: scene.WidgetZoneType(dl.Zone), + Section: scene.WidgetSectionType(dl.Section), + Area: scene.WidgetAreaType(dl.Area), + }).Add(wid, -1) + } + + doc.AlignSystem = mongodoc.NewWidgetAlignSystem(swas) + ids = append(ids, doc.ID) + newRows = append(newRows, doc) + } + + return col.SaveAll(ctx, ids, newRows) + }, + }) +} diff --git a/server/internal/infrastructure/mongo/migration/220214180713_split_schema_of_properties.go b/server/internal/infrastructure/mongo/migration/220214180713_split_schema_of_properties.go new file mode 100644 index 000000000..df80ee240 --- /dev/null +++ b/server/internal/infrastructure/mongo/migration/220214180713_split_schema_of_properties.go @@ -0,0 +1,50 @@ +package migration + 
+import ( + "context" + + "github.com/labstack/gommon/log" + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/pkg/id" + "go.mongodb.org/mongo-driver/bson" +) + +func SplitSchemaOfProperties(ctx context.Context, c DBClient) error { + col := c.WithCollection("property") + + return col.Find(ctx, bson.M{ + "schema": bson.M{"$exists": true}, + }, &mongodoc.BatchConsumer{ + Size: 1000, + Callback: func(rows []bson.Raw) error { + ids := make([]string, 0, len(rows)) + newRows := make([]interface{}, 0, len(rows)) + + log.Infof("migration: SplitSchemaOfProperties: hit properties: %d\n", len(rows)) + + for _, row := range rows { + var doc mongodoc.PropertyDocument + if err := bson.Unmarshal(row, &doc); err != nil { + return err + } + if doc.Schema == "" { + continue + } + + s, err := id.PropertySchemaIDFrom(doc.Schema) + if err != nil { + return err + } + + doc.Schema = "" + doc.SchemaPlugin = s.Plugin().String() + doc.SchemaName = s.ID() + + ids = append(ids, doc.ID) + newRows = append(newRows, doc) + } + + return col.SaveAll(ctx, ids, newRows) + }, + }) +} diff --git a/server/internal/infrastructure/mongo/migration/220309174648_add_scene_field_to_property_schema.go b/server/internal/infrastructure/mongo/migration/220309174648_add_scene_field_to_property_schema.go new file mode 100644 index 000000000..4aad91156 --- /dev/null +++ b/server/internal/infrastructure/mongo/migration/220309174648_add_scene_field_to_property_schema.go @@ -0,0 +1,42 @@ +package migration + +import ( + "context" + + "github.com/labstack/gommon/log" + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/pkg/id" + "go.mongodb.org/mongo-driver/bson" +) + +func AddSceneFieldToPropertySchema(ctx context.Context, c DBClient) error { + col := c.WithCollection("propertySchema") + + return col.Find(ctx, bson.M{}, &mongodoc.BatchConsumer{ + Size: 1000, + Callback: func(rows []bson.Raw) 
error { + ids := make([]string, 0, len(rows)) + newRows := make([]interface{}, 0, len(rows)) + + log.Infof("migration: AddSceneFieldToPropertySchema: hit property schemas: %d\n", len(rows)) + + for _, row := range rows { + var doc mongodoc.PropertySchemaDocument + if err := bson.Unmarshal(row, &doc); err != nil { + return err + } + + s, err := id.PropertySchemaIDFrom(doc.ID) + if err != nil || s.Plugin().Scene() == nil { + continue + } + + doc.Scene = s.Plugin().Scene().StringRef() + ids = append(ids, doc.ID) + newRows = append(newRows, doc) + } + + return col.SaveAll(ctx, ids, newRows) + }, + }) +} diff --git a/server/internal/infrastructure/mongo/migration/client.go b/server/internal/infrastructure/mongo/migration/client.go new file mode 100644 index 000000000..23377dadd --- /dev/null +++ b/server/internal/infrastructure/mongo/migration/client.go @@ -0,0 +1,80 @@ +package migration + +import ( + "context" + "fmt" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type DBClient = *mongodoc.Client + +type MigrationFunc = func(context.Context, DBClient) error + +type Client struct { + Client *mongodoc.Client + Config repo.Config +} + +func (c Client) Migrate(ctx context.Context) (err error) { + config, err := c.Config.LockAndLoad(ctx) + if err != nil { + return fmt.Errorf("Failed to load config: %w", rerror.UnwrapErrInternal(err)) + } + defer func() { + if err2 := c.Config.Unlock(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + nextMigrations := config.NextMigrations(migrationKeys()) + if len(nextMigrations) == 0 { + return nil + } + + var tx repo.Tx + defer func() { + if tx != nil { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + } + }() + + for _, m := range nextMigrations { + tx, err = c.Client.BeginTransaction() + if err != nil { + 
return err + } + + log.Infof("DB migration: %d\n", m) + if err := migrations[m](ctx, c.Client); err != nil { + return fmt.Errorf("Failed to exec migration %d: %w", m, rerror.UnwrapErrInternal(err)) + } + + config.Migration = m + if err := c.Config.Save(ctx, config); err != nil { + return err + } + + tx.Commit() + if err := tx.End(ctx); err != nil { + tx = nil + return err + } + tx = nil + } + + return nil +} + +func migrationKeys() []int64 { + keys := make([]int64, 0, len(migrations)) + for k := range migrations { + keys = append(keys, k) + } + return keys +} diff --git a/server/internal/infrastructure/mongo/migration/migrations.go b/server/internal/infrastructure/mongo/migration/migrations.go new file mode 100644 index 000000000..2329b12cc --- /dev/null +++ b/server/internal/infrastructure/mongo/migration/migrations.go @@ -0,0 +1,17 @@ +// Code generated by migrategen, DO NOT EDIT. + +package migration + +// To add a new migration, run go run ./tools/cmd/migrategen migration_name + +// WARNING: +// If the migration takes too long, the deployment may fail in a serverless environment. +// Set the batch size to as large a value as possible without using up the RAM of the deployment destination. 
+var migrations = map[int64]MigrationFunc{ + 201217132559: AddSceneWidgetId, + 201217193948: AddSceneDefaultTile, + 210310145844: RemovePreviewToken, + 210730175108: AddSceneAlignSystem, + 220214180713: SplitSchemaOfProperties, + 220309174648: AddSceneFieldToPropertySchema, +} diff --git a/server/internal/infrastructure/mongo/mongo_test.go b/server/internal/infrastructure/mongo/mongo_test.go new file mode 100644 index 000000000..2e4d503cc --- /dev/null +++ b/server/internal/infrastructure/mongo/mongo_test.go @@ -0,0 +1,47 @@ +package mongo + +import ( + "context" + "encoding/hex" + "os" + "testing" + "time" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" + "go.mongodb.org/mongo-driver/x/mongo/driver/uuid" +) + +func connect(t *testing.T) func(*testing.T) *mongodoc.Client { + t.Helper() + + // Skip unit testing if "REEARTH_DB" is not configured + // See details: https://github.com/reearth/reearth/issues/273 + db := os.Getenv("REEARTH_DB") + if db == "" { + t.SkipNow() + return nil + } + + c, _ := mongo.Connect( + context.Background(), + options.Client(). + ApplyURI(db). 
+ SetConnectTimeout(time.Second*10), + ) + + return func(t *testing.T) *mongodoc.Client { + t.Helper() + + database, _ := uuid.New() + databaseName := "reearth-test-" + hex.EncodeToString(database[:]) + client := mongodoc.NewClient(databaseName, c) + + t.Cleanup(func() { + _ = c.Database(databaseName).Drop(context.Background()) + }) + + return client + } +} diff --git a/server/internal/infrastructure/mongo/mongodoc/asset.go b/server/internal/infrastructure/mongo/mongodoc/asset.go new file mode 100644 index 000000000..511538ddc --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/asset.go @@ -0,0 +1,74 @@ +package mongodoc + +import ( + "time" + + "github.com/reearth/reearth-backend/pkg/asset" + "github.com/reearth/reearth-backend/pkg/id" + "go.mongodb.org/mongo-driver/bson" +) + +type AssetDocument struct { + ID string + CreatedAt time.Time + Team string + Name string + Size int64 + URL string + ContentType string +} + +type AssetConsumer struct { + Rows []*asset.Asset +} + +func (c *AssetConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc AssetDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + asset, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, asset) + return nil +} + +func NewAsset(asset *asset.Asset) (*AssetDocument, string) { + aid := asset.ID().String() + return &AssetDocument{ + ID: aid, + CreatedAt: asset.CreatedAt(), + Team: asset.Team().String(), + Name: asset.Name(), + Size: asset.Size(), + URL: asset.URL(), + ContentType: asset.ContentType(), + }, aid +} + +func (d *AssetDocument) Model() (*asset.Asset, error) { + aid, err := id.AssetIDFrom(d.ID) + if err != nil { + return nil, err + } + tid, err := id.TeamIDFrom(d.Team) + if err != nil { + return nil, err + } + + return asset.New(). + ID(aid). + CreatedAt(d.CreatedAt). + Team(tid). + Name(d.Name). + Size(d.Size). + URL(d.URL). + ContentType(d.ContentType). 
+ Build() +} diff --git a/server/internal/infrastructure/mongo/mongodoc/auth_request.go b/server/internal/infrastructure/mongo/mongodoc/auth_request.go new file mode 100644 index 000000000..245e94c2d --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/auth_request.go @@ -0,0 +1,116 @@ +package mongodoc + +import ( + "time" + + "github.com/caos/oidc/pkg/oidc" + "github.com/reearth/reearth-backend/pkg/auth" + "github.com/reearth/reearth-backend/pkg/id" + "go.mongodb.org/mongo-driver/bson" +) + +type AuthRequestDocument struct { + ID string + ClientID string + Subject string + Code string + State string + ResponseType string + Scopes []string + Audiences []string + RedirectURI string + Nonce string + CodeChallenge *CodeChallengeDocument + CreatedAt time.Time + AuthorizedAt *time.Time +} + +type CodeChallengeDocument struct { + Challenge string + Method string +} + +type AuthRequestConsumer struct { + Rows []*auth.Request +} + +func (a *AuthRequestConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc AuthRequestDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + request, err := doc.Model() + if err != nil { + return err + } + a.Rows = append(a.Rows, request) + return nil +} + +func NewAuthRequest(req *auth.Request) (*AuthRequestDocument, string) { + if req == nil { + return nil, "" + } + reqID := req.GetID() + var cc *CodeChallengeDocument + if req.GetCodeChallenge() != nil { + cc = &CodeChallengeDocument{ + Challenge: req.GetCodeChallenge().Challenge, + Method: string(req.GetCodeChallenge().Method), + } + } + return &AuthRequestDocument{ + ID: reqID, + ClientID: req.GetClientID(), + Subject: req.GetSubject(), + Code: req.GetCode(), + State: req.GetState(), + ResponseType: string(req.GetResponseType()), + Scopes: req.GetScopes(), + Audiences: req.GetAudience(), + RedirectURI: req.GetRedirectURI(), + Nonce: req.GetNonce(), + CodeChallenge: cc, + CreatedAt: req.CreatedAt(), + AuthorizedAt: 
req.AuthorizedAt(), + }, reqID +} + +func (d *AuthRequestDocument) Model() (*auth.Request, error) { + if d == nil { + return nil, nil + } + + ulid, err := id.AuthRequestIDFrom(d.ID) + if err != nil { + return nil, err + } + + var cc *oidc.CodeChallenge + if d.CodeChallenge != nil { + cc = &oidc.CodeChallenge{ + Challenge: d.CodeChallenge.Challenge, + Method: oidc.CodeChallengeMethod(d.CodeChallenge.Method), + } + } + var req = auth.NewRequest(). + ID(ulid). + ClientID(d.ClientID). + Subject(d.Subject). + Code(d.Code). + State(d.State). + ResponseType(oidc.ResponseType(d.ResponseType)). + Scopes(d.Scopes). + Audiences(d.Audiences). + RedirectURI(d.RedirectURI). + Nonce(d.Nonce). + CodeChallenge(cc). + CreatedAt(d.CreatedAt). + AuthorizedAt(d.AuthorizedAt). + MustBuild() + return req, nil +} diff --git a/server/internal/infrastructure/mongo/mongodoc/client.go b/server/internal/infrastructure/mongo/mongodoc/client.go new file mode 100644 index 000000000..7c651998a --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/client.go @@ -0,0 +1,439 @@ +package mongodoc + +import ( + "context" + "errors" + "fmt" + "io" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/rerror" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +type Client struct { + database string + client *mongo.Client +} + +func NewClient(database string, c *mongo.Client) *Client { + return &Client{ + database: database, + client: c, + } +} + +func (c *Client) WithCollection(col string) *ClientCollection { + return &ClientCollection{ + Client: c, + CollectionName: col, + } +} + +func (c *Client) Collection(col string) *mongo.Collection { + return c.client.Database(c.database).Collection(col) +} + +func (c *Client) Find(ctx context.Context, col string, filter interface{}, consumer Consumer) error { + cursor, err 
:= c.Collection(col).Find(ctx, filter) + if errors.Is(err, mongo.ErrNilDocument) || errors.Is(err, mongo.ErrNoDocuments) { + return rerror.ErrNotFound + } + if err != nil { + return rerror.ErrInternalBy(err) + } + defer func() { + _ = cursor.Close(ctx) + }() + + for { + c := cursor.Next(ctx) + if err := cursor.Err(); err != nil && !errors.Is(err, io.EOF) { + return rerror.ErrInternalBy(err) + } + + if !c { + if err := consumer.Consume(nil); err != nil { + return rerror.ErrInternalBy(err) + } + break + } + + if err := consumer.Consume(cursor.Current); err != nil { + return rerror.ErrInternalBy(err) + } + } + return nil +} + +func (c *Client) FindOne(ctx context.Context, col string, filter interface{}, consumer Consumer) error { + raw, err := c.Collection(col).FindOne(ctx, filter).DecodeBytes() + if errors.Is(err, mongo.ErrNilDocument) || errors.Is(err, mongo.ErrNoDocuments) { + return rerror.ErrNotFound + } + if err := consumer.Consume(raw); err != nil { + return rerror.ErrInternalBy(err) + } + return nil +} + +func (c *Client) Count(ctx context.Context, col string, filter interface{}) (int64, error) { + count, err := c.Collection(col).CountDocuments(ctx, filter) + if err != nil { + return count, rerror.ErrInternalBy(err) + } + return count, nil +} + +func (c *Client) RemoveOne(ctx context.Context, col string, f interface{}) error { + _, err := c.Collection(col).DeleteOne(ctx, f) + if err != nil { + return rerror.ErrInternalBy(err) + } + return nil +} + +func (c *Client) RemoveAll(ctx context.Context, col string, f interface{}) error { + _, err := c.Collection(col).DeleteMany(ctx, f) + if err != nil { + return rerror.ErrInternalBy(err) + } + return nil +} + +var ( + replaceOption = (&options.ReplaceOptions{}).SetUpsert(true) +) + +func (c *Client) SaveOne(ctx context.Context, col string, id string, replacement interface{}) error { + _, err := c.Collection(col).ReplaceOne(ctx, bson.M{"id": id}, replacement, replaceOption) + if err != nil { + return 
rerror.ErrInternalBy(err) + } + return nil +} + +func (c *Client) SaveAll(ctx context.Context, col string, ids []string, updates []interface{}) error { + if len(ids) == 0 || len(updates) == 0 { + return nil + } + if len(ids) != len(updates) { + return rerror.ErrInternalBy(errors.New("invalid save args")) + } + + writeModels := make([]mongo.WriteModel, 0, len(updates)) + for i, u := range updates { + id := ids[i] + writeModels = append( + writeModels, + (&mongo.ReplaceOneModel{}). + SetUpsert(true). + SetFilter(bson.M{"id": id}). + SetReplacement(u), + ) + } + + _, err := c.Collection(col).BulkWrite(ctx, writeModels) + if err != nil { + return rerror.ErrInternalBy(err) + } + return nil +} + +func (c *Client) UpdateMany(ctx context.Context, col string, filter, update interface{}) error { + _, err := c.Collection(col).UpdateMany(ctx, filter, bson.M{ + "$set": update, + }) + if err != nil { + return rerror.ErrInternalBy(err) + } + return nil +} + +type Update struct { + Filter interface{} + Update interface{} + ArrayFilters []interface{} +} + +func (c *Client) UpdateManyMany(ctx context.Context, col string, updates []Update) error { + writeModels := make([]mongo.WriteModel, 0, len(updates)) + for _, w := range updates { + wm := mongo.NewUpdateManyModel().SetFilter(w.Filter).SetUpdate(bson.M{ + "$set": w.Update, + }) + if len(w.ArrayFilters) > 0 { + wm.SetArrayFilters(options.ArrayFilters{ + Filters: w.ArrayFilters, + }) + } + writeModels = append(writeModels, wm) + } + + _, err := c.Collection(col).BulkWrite(ctx, writeModels) + if err != nil { + return rerror.ErrInternalBy(err) + } + return nil +} + +func getCursor(raw bson.Raw, key string) (*usecase.Cursor, error) { + val, err := raw.LookupErr(key) + if err != nil { + return nil, fmt.Errorf("failed to lookup cursor: %v", err.Error()) + } + var s string + if err := val.Unmarshal(&s); err != nil { + return nil, fmt.Errorf("failed to unmarshal cursor: %v", err.Error()) + } + c := usecase.Cursor(s) + return &c, nil +} + 
+func (c *Client) Paginate(ctx context.Context, col string, filter interface{}, sort *string, p *Pagination, consumer Consumer) (*usecase.PageInfo, error) { + if p == nil { + return nil, nil + } + coll := c.Collection(col) + const key = "id" + + findOptions := options.Find().SetCollation(&options.Collation{Strength: 1, Locale: "en"}) + + sortOptions, sortKey := sortOptionsFrom(sort, p, key) + + findOptions.Sort = sortOptions + + count, err := coll.CountDocuments(ctx, filter) + if err != nil { + return nil, fmt.Errorf("failed to count documents: %v", err.Error()) + } + + filter, limit, err := paginationFilter(ctx, coll, p, sortKey, key, filter) + if err != nil { + return nil, err + } + + // ๆ›ดใซ่ชญใ‚ใ‚‹่ฆ็ด ใŒใ‚ใ‚‹ใฎใ‹็ขบใ‹ใ‚ใ‚‹ใŸใ‚ใซไธ€ใคๅคšใ‚ใซ่ชญใฟๅ‡บใ™ + // Read one more element so that we can see whether there's a further one + limit++ + findOptions.Limit = &limit + + cursor, err := coll.Find(ctx, filter, findOptions) + if err != nil { + return nil, fmt.Errorf("failed to find: %v", err.Error()) + } + defer func() { + _ = cursor.Close(ctx) + }() + + results := make([]bson.Raw, 0, limit) + for cursor.Next(ctx) { + raw := make(bson.Raw, len(cursor.Current)) + copy(raw, cursor.Current) + results = append(results, raw) + } + if err := cursor.Err(); err != nil { + return nil, fmt.Errorf("failed to read cursor: %v", err.Error()) + } + + hasMore := false + if len(results) == int(limit) { + hasMore = true + // Remove the extra one reading. 
+ results = results[:len(results)-1] + } + + for _, result := range results { + if err := consumer.Consume(result); err != nil { + return nil, err + } + } + + var startCursor, endCursor *usecase.Cursor + if len(results) > 0 { + sc, err := getCursor(results[0], key) + if err != nil { + return nil, fmt.Errorf("failed to get start cursor: %v", err.Error()) + } + startCursor = sc + ec, err := getCursor(results[len(results)-1], key) + if err != nil { + return nil, fmt.Errorf("failed to get end cursor: %v", err.Error()) + } + endCursor = ec + } + + // ref: https://facebook.github.io/relay/graphql/connections.htm#sec-undefined.PageInfo.Fields + // If first is set, false can be returned unless it can be efficiently determined whether or not a previous page exists. + // If last is set, false can be returned unless it can be efficiently determined whether or not a next page exists. + // Returning absolutely false because the existing implementation cannot determine it efficiently. + var hasNextPage, hasPreviousPage bool + switch { + case p.First != nil: + hasNextPage = hasMore + case p.Last != nil: + hasPreviousPage = hasMore + } + + return usecase.NewPageInfo(int(count), startCursor, endCursor, hasNextPage, hasPreviousPage), nil +} + +func sortOptionsFrom(sort *string, p *Pagination, key string) (bson.D, string) { + var sortOptions bson.D + var sortKey = "" + if sort != nil && len(*sort) > 0 && *sort != "id" { + sortKey = *sort + sortOptions = append(sortOptions, bson.E{Key: sortKey, Value: p.SortDirection()}) + } + sortOptions = append(sortOptions, bson.E{Key: key, Value: p.SortDirection()}) + return sortOptions, sortKey +} + +func paginationFilter(ctx context.Context, coll *mongo.Collection, p *Pagination, sortKey, key string, filter interface{}) (interface{}, int64, error) { + limit, op, cur, err := p.Parameters() + if err != nil { + return nil, 0, fmt.Errorf("failed to parse pagination parameters: %w", err) + } + + var paginationFilter bson.M + + if cur != nil { + + if 
sortKey == "" { + paginationFilter = bson.M{key: bson.M{op: *cur}} + } else { + var curObj bson.M + if err := coll.FindOne(ctx, bson.M{key: *cur}).Decode(&curObj); err != nil { + return nil, 0, fmt.Errorf("failed to find cursor element") + } + if curObj[sortKey] == nil { + return nil, 0, fmt.Errorf("invalied sort key") + } + paginationFilter = bson.M{ + "$or": []bson.M{ + {sortKey: bson.M{op: curObj[sortKey]}}, + { + sortKey: curObj[sortKey], + key: bson.M{op: *cur}, + }, + }, + } + } + } + + return And( + filter, + "", + paginationFilter, + ), limit, nil +} + +func (c *Client) CreateIndex(ctx context.Context, col string, keys []string) []string { + return c.CreateUniqueIndex(ctx, col, keys, []string{}) +} + +func (c *Client) CreateUniqueIndex(ctx context.Context, col string, keys, uniqueKeys []string) []string { + coll := c.Collection(col) + indexedKeys := indexes(ctx, coll) + + // store unique keys as map to check them in an efficient way + ukm := map[string]struct{}{} + for _, k := range append([]string{"id"}, uniqueKeys...) { + ukm[k] = struct{}{} + } + + var newIndexes []mongo.IndexModel + for _, k := range append([]string{"id"}, keys...) 
{ + if _, ok := indexedKeys[k]; ok { + continue + } + indexBg := true + _, isUnique := ukm[k] + newIndexes = append(newIndexes, mongo.IndexModel{ + Keys: map[string]int{ + k: 1, + }, + Options: &options.IndexOptions{ + Background: &indexBg, + Unique: &isUnique, + }, + }) + } + + if len(newIndexes) > 0 { + index, err := coll.Indexes().CreateMany(ctx, newIndexes) + if err != nil { + panic(err) + } + return index + } + return nil +} + +func indexes(ctx context.Context, coll *mongo.Collection) map[string]struct{} { + c, err := coll.Indexes().List(ctx, nil) + if err != nil { + panic(err) + } + indexes := []struct{ Key map[string]int }{} + err = c.All(ctx, &indexes) + if err != nil { + panic(err) + } + keys := map[string]struct{}{} + for _, i := range indexes { + for k := range i.Key { + keys[k] = struct{}{} + } + } + return keys +} + +func (c *Client) BeginTransaction() (repo.Tx, error) { + s, err := c.client.StartSession() + if err != nil { + return nil, rerror.ErrInternalBy(err) + } + + if err := s.StartTransaction(&options.TransactionOptions{}); err != nil { + return nil, rerror.ErrInternalBy(err) + } + + return &Tx{session: s, commit: false}, nil +} + +type Tx struct { + session mongo.Session + commit bool +} + +func (t *Tx) Commit() { + if t == nil { + return + } + t.commit = true +} + +func (t *Tx) End(ctx context.Context) error { + if t == nil { + return nil + } + + if t.commit { + if err := t.session.CommitTransaction(ctx); err != nil { + return rerror.ErrInternalBy(err) + } + } else if err := t.session.AbortTransaction(ctx); err != nil { + return rerror.ErrInternalBy(err) + } + + t.session.EndSession(ctx) + return nil +} + +func (t *Tx) IsCommitted() bool { + return t.commit +} diff --git a/server/internal/infrastructure/mongo/mongodoc/clientcol.go b/server/internal/infrastructure/mongo/mongodoc/clientcol.go new file mode 100644 index 000000000..5a2d29ae4 --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/clientcol.go @@ -0,0 +1,65 @@ +package 
mongodoc + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "go.mongodb.org/mongo-driver/mongo" +) + +type ClientCollection struct { + Client *Client + CollectionName string +} + +func (c *ClientCollection) Collection() *mongo.Collection { + return c.Client.Collection(c.CollectionName) +} + +func (c *ClientCollection) FindOne(ctx context.Context, filter interface{}, consumer Consumer) error { + return c.Client.FindOne(ctx, c.CollectionName, filter, consumer) +} + +func (c *ClientCollection) Find(ctx context.Context, filter interface{}, consumer Consumer) error { + return c.Client.Find(ctx, c.CollectionName, filter, consumer) +} + +func (c *ClientCollection) Count(ctx context.Context, filter interface{}) (int64, error) { + return c.Client.Count(ctx, c.CollectionName, filter) +} + +func (c *ClientCollection) Paginate(ctx context.Context, filter interface{}, sort *string, p *usecase.Pagination, consumer Consumer) (*usecase.PageInfo, error) { + return c.Client.Paginate(ctx, c.CollectionName, filter, sort, PaginationFrom(p), consumer) +} + +func (c *ClientCollection) SaveOne(ctx context.Context, id string, replacement interface{}) error { + return c.Client.SaveOne(ctx, c.CollectionName, id, replacement) +} + +func (c *ClientCollection) SaveAll(ctx context.Context, ids []string, updates []interface{}) error { + return c.Client.SaveAll(ctx, c.CollectionName, ids, updates) +} + +func (c *ClientCollection) UpdateMany(ctx context.Context, filter interface{}, update interface{}) error { + return c.Client.UpdateMany(ctx, c.CollectionName, filter, update) +} + +func (c *ClientCollection) UpdateManyMany(ctx context.Context, updates []Update) error { + return c.Client.UpdateManyMany(ctx, c.CollectionName, updates) +} + +func (c *ClientCollection) RemoveOne(ctx context.Context, f interface{}) error { + return c.Client.RemoveOne(ctx, c.CollectionName, f) +} + +func (c *ClientCollection) RemoveAll(ctx context.Context, f interface{}) error { + return 
c.Client.RemoveAll(ctx, c.CollectionName, f) +} + +func (c *ClientCollection) CreateIndex(ctx context.Context, keys []string) []string { + return c.Client.CreateIndex(ctx, c.CollectionName, keys) +} + +func (c *ClientCollection) CreateUniqueIndex(ctx context.Context, keys, uniqueKeys []string) []string { + return c.Client.CreateUniqueIndex(ctx, c.CollectionName, keys, uniqueKeys) +} diff --git a/server/internal/infrastructure/mongo/mongodoc/config.go b/server/internal/infrastructure/mongo/mongodoc/config.go new file mode 100644 index 000000000..d20ca1288 --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/config.go @@ -0,0 +1,42 @@ +package mongodoc + +import "github.com/reearth/reearth-backend/pkg/config" + +type ConfigDocument struct { + Migration int64 + Auth *Auth +} + +type Auth struct { + Cert string + Key string +} + +func NewConfig(c config.Config) ConfigDocument { + d := ConfigDocument{ + Migration: c.Migration, + } + if c.Auth != nil { + d.Auth = &Auth{ + Cert: c.Auth.Cert, + Key: c.Auth.Key, + } + } + return d +} + +func (c *ConfigDocument) Model() *config.Config { + if c == nil { + return &config.Config{} + } + m := &config.Config{ + Migration: c.Migration, + } + if c.Auth != nil { + m.Auth = &config.Auth{ + Cert: c.Auth.Cert, + Key: c.Auth.Key, + } + } + return m +} diff --git a/server/internal/infrastructure/mongo/mongodoc/consumer.go b/server/internal/infrastructure/mongo/mongodoc/consumer.go new file mode 100644 index 000000000..0550bad31 --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/consumer.go @@ -0,0 +1,42 @@ +package mongodoc + +import "go.mongodb.org/mongo-driver/bson" + +type Consumer interface { + // Consume ใงๆธกใ•ใ‚ŒใŸrawใฎๅ‚็…งใ‚’ใƒ•ใ‚ฃใƒผใƒซใƒ‰ใซๆŒใฃใฆใฏใ„ใ‘ใพใ›ใ‚“ + // MUST NOT HAVE A ROW REFERENCE PASSED BY Consume METHOD IN THE FIELD + Consume(raw bson.Raw) error +} + +type FuncConsumer func(raw bson.Raw) error + +func (c FuncConsumer) Consume(raw bson.Raw) error { + return c(raw) +} + 
+type BatchConsumer struct { + Size int + Rows []bson.Raw + Callback func([]bson.Raw) error +} + +func (c *BatchConsumer) Consume(raw bson.Raw) error { + size := c.Size + if size == 0 { + size = 10 + } + + if raw != nil { + c.Rows = append(c.Rows, raw) + } + + if raw == nil || len(c.Rows) >= size { + err := c.Callback(c.Rows) + c.Rows = []bson.Raw{} + if err != nil { + return err + } + } + + return nil +} diff --git a/server/internal/infrastructure/mongo/mongodoc/consumer_test.go b/server/internal/infrastructure/mongo/mongodoc/consumer_test.go new file mode 100644 index 000000000..98c59b1f0 --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/consumer_test.go @@ -0,0 +1,60 @@ +package mongodoc + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" + "go.mongodb.org/mongo-driver/bson" +) + +var _ Consumer = FuncConsumer(nil) + +func TestBatchConsumer(t *testing.T) { + c := &BatchConsumer{ + Size: 10, + Callback: func(r []bson.Raw) error { + assert.Equal(t, []bson.Raw{[]byte{0}, []byte{1}, []byte{2}, []byte{3}, []byte{4}}, r) + return nil + }, + } + + for i := 0; i < 5; i++ { + r := bson.Raw([]byte{byte(i)}) + assert.Nil(t, c.Consume(r)) + } + assert.Nil(t, c.Consume(nil)) +} + +func TestBatchConsumerWithManyRows(t *testing.T) { + counter := 0 + c := &BatchConsumer{ + Size: 1, + Callback: func(r []bson.Raw) error { + if counter >= 5 { + assert.Equal(t, []bson.Raw{}, r) + return nil + } + assert.Equal(t, []bson.Raw{[]byte{byte(counter)}}, r) + counter++ + return nil + }, + } + + for i := 0; i < 5; i++ { + r := bson.Raw([]byte{byte(i)}) + assert.Nil(t, c.Consume(r)) + } + assert.Nil(t, c.Consume(nil)) +} + +func TestBatchConsumerWithError(t *testing.T) { + c := &BatchConsumer{ + Size: 1, + Callback: func(r []bson.Raw) error { + return errors.New("hoge") + }, + } + + assert.EqualError(t, c.Consume(nil), "hoge") +} diff --git a/server/internal/infrastructure/mongo/mongodoc/dataset.go 
b/server/internal/infrastructure/mongo/mongodoc/dataset.go new file mode 100644 index 000000000..3cef6b719 --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/dataset.go @@ -0,0 +1,188 @@ +package mongodoc + +import ( + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type DatasetDocument struct { + ID string + Source string + Schema string + Fields []*DatasetFieldDocument + Scene string +} + +type DatasetFieldDocument struct { + Field string + Type string + Value interface{} + Source string +} + +type DatasetExtendedDocument struct { + DatasetDocument + Graph []*DatasetExtendedDocument + Depth int +} + +type DatasetConsumer struct { + Rows []*dataset.Dataset +} + +func (c *DatasetConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc DatasetDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + dataset, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, dataset) + return nil +} + +type DatasetMapConsumer struct { + Map dataset.Map +} + +func (c *DatasetMapConsumer) Consume(raw bson.Raw) error { + var doc DatasetDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + m, err := doc.Model() + if err != nil { + return err + } + if c.Map == nil { + c.Map = map[id.DatasetID]*dataset.Dataset{ + m.ID(): m, + } + } else { + c.Map[m.ID()] = m + } + return nil +} + +type DatasetBatchConsumer struct { + Size int + Callback func([]*dataset.Dataset) error + consumer *BatchConsumer +} + +func (c *DatasetBatchConsumer) Consume(raw bson.Raw) error { + if c.consumer == nil { + c.consumer = &BatchConsumer{ + Size: c.Size, + Callback: func(rows []bson.Raw) error { + datasets := make([]*dataset.Dataset, 0, len(rows)) + + for _, r := range rows { + var doc DatasetDocument + if err := bson.Unmarshal(r, &doc); err != nil { + 
return err + } + dataset, err := doc.Model() + if err != nil { + return err + } + + datasets = append(datasets, dataset) + } + + return c.Callback(datasets) + }, + } + } + + return c.consumer.Consume(raw) +} + +func (doc *DatasetDocument) Model() (*dataset.Dataset, error) { + did, err := id.DatasetIDFrom(doc.ID) + if err != nil { + return nil, err + } + scene, err := id.SceneIDFrom(doc.Scene) + if err != nil { + return nil, err + } + ds, err := id.DatasetSchemaIDFrom(doc.Schema) + if err != nil { + return nil, err + } + fields := make([]*dataset.Field, 0, len(doc.Fields)) + for _, field := range doc.Fields { + fid, err := id.DatasetFieldIDFrom(field.Field) + if err != nil { + return nil, err + } + f := dataset.NewField( + fid, + toModelDatasetValue(field.Value, field.Type), + field.Source, + ) + fields = append(fields, f) + } + return dataset.New(). + ID(did). + Source(doc.Source). + Fields(fields). + Schema(ds). + Scene(scene). + Build() +} + +func NewDataset(dataset *dataset.Dataset) (*DatasetDocument, string) { + did := dataset.ID().String() + var doc DatasetDocument + doc.ID = did + doc.Source = dataset.Source() + doc.Scene = dataset.Scene().String() + doc.Schema = dataset.Schema().String() + + fields := dataset.Fields() + doc.Fields = make([]*DatasetFieldDocument, 0, len(fields)) + for _, f := range fields { + doc.Fields = append(doc.Fields, &DatasetFieldDocument{ + Field: f.Field().String(), + Type: string(f.Type()), + Value: f.Value().Interface(), + Source: f.Source(), + }) + } + return &doc, did +} + +func NewDatasets(datasets []*dataset.Dataset, f scene.IDList) ([]interface{}, []string) { + res := make([]interface{}, 0, len(datasets)) + ids := make([]string, 0, len(datasets)) + for _, d := range datasets { + if d == nil || f != nil && !f.Has(d.Scene()) { + continue + } + r, id := NewDataset(d) + res = append(res, r) + ids = append(ids, id) + } + return res, ids +} + +func toModelDatasetValue(v interface{}, t string) *dataset.Value { + if v == nil { + 
return nil + } + if v2, ok := v.(bson.D); ok { + v = v2.Map() + } + return dataset.ValueType(t).ValueFrom(v) +} diff --git a/server/internal/infrastructure/mongo/mongodoc/dataset_schema.go b/server/internal/infrastructure/mongo/mongodoc/dataset_schema.go new file mode 100644 index 000000000..45ef65e04 --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/dataset_schema.go @@ -0,0 +1,130 @@ +package mongodoc + +import ( + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type DatasetSchemaDocument struct { + ID string + Source string + Name string + Fields []*DatasetSchemaFieldDocument + RepresentativeField *string + Scene string + Dynamic bool +} + +type DatasetSchemaFieldDocument struct { + ID string + Name string + Type string + Source string +} + +type DatasetSchemaConsumer struct { + Rows []*dataset.Schema +} + +func (c *DatasetSchemaConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc DatasetSchemaDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + dataset, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, dataset) + return nil +} + +func (d *DatasetSchemaDocument) Model() (*dataset.Schema, error) { + did, err := id.DatasetSchemaIDFrom(d.ID) + if err != nil { + return nil, err + } + scene, err := id.SceneIDFrom(d.Scene) + if err != nil { + return nil, err + } + + fields := make([]*dataset.SchemaField, 0, len(d.Fields)) + for _, field := range d.Fields { + fid, err := id.DatasetFieldIDFrom(field.ID) + if err != nil { + return nil, err + } + vt := dataset.ValueType(field.Type) + f, err := dataset.NewSchemaField(). + Name(field.Name). + ID(fid). + Type(vt). + Source(field.Source). + Build() + if err != nil { + return nil, err + } + fields = append(fields, f) + } + b := dataset.NewSchema(). + ID(did). + Name(d.Name). 
+ Source(d.Source). + Scene(scene). + Fields(fields) + if d.RepresentativeField != nil { + dsfid, err := id.DatasetFieldIDFrom(*d.RepresentativeField) + if err != nil { + return nil, err + } + b.RepresentativeField(dsfid) + } + return b.Build() +} + +func NewDatasetSchema(dataset *dataset.Schema) (*DatasetSchemaDocument, string) { + did := dataset.ID().String() + doc := DatasetSchemaDocument{ + ID: did, + Name: dataset.Name(), + Source: dataset.Source(), + Scene: dataset.Scene().String(), + RepresentativeField: dataset.RepresentativeFieldID().StringRef(), + Dynamic: dataset.Dynamic(), + } + + fields := dataset.Fields() + doc.Fields = make([]*DatasetSchemaFieldDocument, 0, len(fields)) + for _, f := range fields { + doc.Fields = append(doc.Fields, &DatasetSchemaFieldDocument{ + ID: f.ID().String(), + Type: string(f.Type()), + Name: f.Name(), + Source: f.Source(), + }) + } + + return &doc, did +} + +func NewDatasetSchemas(datasetSchemas []*dataset.Schema, f scene.IDList) ([]interface{}, []string) { + res := make([]interface{}, 0, len(datasetSchemas)) + ids := make([]string, 0, len(datasetSchemas)) + for _, d := range datasetSchemas { + if d == nil || f != nil && !f.Has(d.Scene()) { + continue + } + r, id := NewDatasetSchema(d) + res = append(res, r) + ids = append(ids, id) + } + return res, ids +} diff --git a/server/internal/infrastructure/mongo/mongodoc/layer.go b/server/internal/infrastructure/mongo/mongodoc/layer.go new file mode 100644 index 000000000..3d8686cd6 --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/layer.go @@ -0,0 +1,359 @@ +package mongodoc + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/scene" + "go.mongodb.org/mongo-driver/bson" +) + +type LayerDocument struct { + ID string + Name string + Visible bool + Scene string + Plugin *string + Extension *string + Property *string + Infobox *LayerInfoboxDocument + Item 
*LayerItemDocument + Group *LayerGroupDocument + Tags LayerTagListDocument +} + +type LayerItemDocument struct { + LinkedDataset *string +} + +type LayerGroupDocument struct { + Layers []string + LinkedDatasetSchema *string + Root bool +} + +type LayerInfoboxFieldDocument struct { + ID string + Plugin string + Extension string + Property string +} + +type LayerInfoboxDocument struct { + Property string + Fields []LayerInfoboxFieldDocument +} + +type LayerTagDocument struct { + ID string + Group bool + Tags []LayerTagDocument +} + +type LayerTagListDocument []LayerTagDocument + +type LayerConsumer struct { + Rows []*layer.Layer + GroupRows []*layer.Group + ItemRows []*layer.Item +} + +func (c *LayerConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc LayerDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + li, lg, err := doc.Model() + if err != nil { + return err + } + if li != nil { + var layer layer.Layer = li + c.Rows = append(c.Rows, &layer) + c.ItemRows = append(c.ItemRows, li) + } + if lg != nil { + var layer layer.Layer = lg + c.Rows = append(c.Rows, &layer) + c.GroupRows = append(c.GroupRows, lg) + } + return nil +} + +func NewLayer(l layer.Layer) (*LayerDocument, string) { + var group *LayerGroupDocument + var item *LayerItemDocument + var infobox *LayerInfoboxDocument + + if lg := layer.GroupFromLayer(l); lg != nil { + group = &LayerGroupDocument{ + Layers: lg.Layers().Strings(), + LinkedDatasetSchema: lg.LinkedDatasetSchema().StringRef(), + Root: lg.IsRoot(), + } + } + + if li := layer.ItemFromLayer(l); li != nil { + item = &LayerItemDocument{ + LinkedDataset: li.LinkedDataset().StringRef(), + } + } + + if ib := l.Infobox(); ib != nil { + ibfields := ib.Fields() + fields := make([]LayerInfoboxFieldDocument, 0, len(ibfields)) + for _, f := range ibfields { + fields = append(fields, LayerInfoboxFieldDocument{ + ID: f.ID().String(), + Plugin: f.Plugin().String(), + Extension: 
string(f.Extension()), + Property: f.Property().String(), + }) + } + infobox = &LayerInfoboxDocument{ + Property: ib.Property().String(), + Fields: fields, + } + } + + id := l.ID().String() + return &LayerDocument{ + ID: id, + Name: l.Name(), + Visible: l.IsVisible(), + Scene: l.Scene().String(), + Infobox: infobox, + Group: group, + Item: item, + Plugin: l.Plugin().StringRef(), + Extension: l.Extension().StringRef(), + Property: l.Property().StringRef(), + Tags: NewLayerTagList(l.Tags()), + }, id +} + +func NewLayers(layers layer.List, f scene.IDList) ([]interface{}, []string) { + res := make([]interface{}, 0, len(layers)) + ids := make([]string, 0, len(layers)) + for _, d := range layers { + if d == nil { + continue + } + d2 := *d + if d2 == nil || f != nil && !f.Has(d2.Scene()) { + continue + } + r, id := NewLayer(d2) + res = append(res, r) + ids = append(ids, id) + } + return res, ids +} + +func (d *LayerDocument) Model() (*layer.Item, *layer.Group, error) { + if d.Item != nil { + li, err := d.ModelItem() + if err != nil { + return nil, nil, err + } + return li, nil, nil + } + if d.Group != nil { + lg, err := d.ModelGroup() + if err != nil { + return nil, nil, err + } + return nil, lg, nil + } + return nil, nil, errors.New("invalid layer") +} + +func (d *LayerDocument) ModelItem() (*layer.Item, error) { + lid, err := id.LayerIDFrom(d.ID) + if err != nil { + return nil, err + } + sid, err := id.SceneIDFrom(d.Scene) + if err != nil { + return nil, err + } + ib, err2 := ToModelInfobox(d.Infobox) + if err2 != nil { + return nil, err + } + + return layer.NewItem(). + ID(lid). + Name(d.Name). + IsVisible(d.Visible). + Plugin(id.PluginIDFromRef(d.Plugin)). + Extension(id.PluginExtensionIDFromRef(d.Extension)). + Property(id.PropertyIDFromRef(d.Property)). + Infobox(ib). + Scene(sid). + Tags(d.Tags.Model()). + // item + LinkedDataset(id.DatasetIDFromRef(d.Item.LinkedDataset)). 
+ Build() +} + +func (d *LayerDocument) ModelGroup() (*layer.Group, error) { + lid, err := id.LayerIDFrom(d.ID) + if err != nil { + return nil, err + } + sid, err := id.SceneIDFrom(d.Scene) + if err != nil { + return nil, err + } + ib, err2 := ToModelInfobox(d.Infobox) + if err2 != nil { + return nil, err2 + } + + ids := make([]id.LayerID, 0, len(d.Group.Layers)) + for _, lgid := range d.Group.Layers { + lid, err := id.LayerIDFrom(lgid) + if err != nil { + return nil, err + } + ids = append(ids, lid) + } + + return layer.NewGroup(). + ID(lid). + Name(d.Name). + IsVisible(d.Visible). + Plugin(id.PluginIDFromRef(d.Plugin)). + Extension(id.PluginExtensionIDFromRef(d.Extension)). + Property(id.PropertyIDFromRef(d.Property)). + Infobox(ib). + Scene(sid). + Tags(d.Tags.Model()). + // group + Root(d.Group != nil && d.Group.Root). + Layers(layer.NewIDList(ids)). + LinkedDatasetSchema(id.DatasetSchemaIDFromRef(d.Group.LinkedDatasetSchema)). + Build() +} + +func ToModelInfobox(ib *LayerInfoboxDocument) (*layer.Infobox, error) { + if ib == nil { + return nil, nil + } + pid, err := id.PropertyIDFrom(ib.Property) + if err != nil { + return nil, err + } + fields := make([]*layer.InfoboxField, 0, len(ib.Fields)) + for _, f := range ib.Fields { + iid, err := id.InfoboxFieldIDFrom(f.ID) + if err != nil { + return nil, err + } + pid, err := id.PluginIDFrom(f.Plugin) + if err != nil { + return nil, err + } + prid, err := id.PropertyIDFrom(f.Property) + if err != nil { + return nil, err + } + ibf, err := layer.NewInfoboxField(). + ID(iid). + Plugin(pid). + Extension(id.PluginExtensionID(f.Extension)). + Property(prid). 
+ Build() + if err != nil { + return nil, err + } + fields = append(fields, ibf) + } + return layer.NewInfobox(fields, pid), nil +} + +func NewLayerTagList(list *layer.TagList) LayerTagListDocument { + if list.IsEmpty() { + return nil + } + + tags := list.Tags() + if len(tags) == 0 { + return nil + } + res := make([]LayerTagDocument, 0, len(tags)) + for _, t := range tags { + if t == nil { + return nil + } + if td := NewLayerTag(t); td != nil { + res = append(res, *td) + } + } + return res +} + +func (d *LayerTagListDocument) Model() *layer.TagList { + if d == nil { + return nil + } + + tags := make([]layer.Tag, 0, len(*d)) + for _, t := range *d { + if ti := t.Model(); ti != nil { + tags = append(tags, ti) + } + } + return layer.NewTagList(tags) +} + +func NewLayerTag(t layer.Tag) *LayerTagDocument { + var group bool + var tags []LayerTagDocument + + if tg := layer.TagGroupFrom(t); tg != nil { + group = true + children := tg.Children() + tags = make([]LayerTagDocument, 0, len(children)) + for _, c := range children { + if ct := NewLayerTag(c); ct != nil { + tags = append(tags, *ct) + } + } + } else if ti := layer.TagItemFrom(t); ti == nil { + return nil + } + return &LayerTagDocument{ + ID: t.ID().String(), + Group: group, + Tags: tags, + } +} + +func (d *LayerTagDocument) Model() layer.Tag { + if d == nil { + return nil + } + + tid := id.TagIDFromRef(&d.ID) + if tid == nil { + return nil + } + + if d.Group { + tags := make([]*layer.TagItem, 0, len(d.Tags)) + for _, t := range d.Tags { + if ti := layer.TagItemFrom(t.Model()); ti != nil { + tags = append(tags, ti) + } + } + return layer.NewTagGroup(*tid, tags) + } + return layer.NewTagItem(*tid) +} diff --git a/server/internal/infrastructure/mongo/mongodoc/pagination.go b/server/internal/infrastructure/mongo/mongodoc/pagination.go new file mode 100644 index 000000000..ce0f75042 --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/pagination.go @@ -0,0 +1,49 @@ +package mongodoc + +import ( + "errors" + 
+ "github.com/reearth/reearth-backend/internal/usecase" +) + +type Pagination struct { + Before *string + After *string + First *int + Last *int +} + +func PaginationFrom(pagination *usecase.Pagination) *Pagination { + if pagination == nil { + return nil + } + return &Pagination{ + Before: (*string)(pagination.Before), + After: (*string)(pagination.After), + First: pagination.First, + Last: pagination.Last, + } +} + +func (p *Pagination) SortDirection() int { + if p != nil && p.Last != nil { + return -1 + } + return 1 +} + +func (p *Pagination) Parameters() (limit int64, op string, cursor *string, err error) { + if first, after := p.First, p.After; first != nil { + limit = int64(*first) + op = "$gt" + cursor = after + return + } + if last, before := p.Last, p.Before; last != nil { + limit = int64(*last) + op = "$lt" + cursor = before + return + } + return 0, "", nil, errors.New("neither first nor last are set") +} diff --git a/server/internal/infrastructure/mongo/mongodoc/plugin.go b/server/internal/infrastructure/mongo/mongodoc/plugin.go new file mode 100644 index 000000000..99a60c87e --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/plugin.go @@ -0,0 +1,200 @@ +package mongodoc + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "go.mongodb.org/mongo-driver/bson" +) + +type PluginDocument struct { + ID string + Name map[string]string + Author string + Description map[string]string + RepositoryURL string + Extensions []PluginExtensionDocument + Schema *string + Scene *string `bson:",omitempty"` +} + +type PluginExtensionDocument struct { + ID string + Type string + Name map[string]string + Description map[string]string + Icon string + Schema string + Visualizer string `bson:",omitempty"` + SingleOnly bool + WidgetLayout *WidgetLayoutDocument +} + +type WidgetLayoutDocument struct { + Extendable *WidgetExtendableDocument + Extended bool + Floating bool + DefaultLocation 
*WidgetLocationDocument +} + +type WidgetExtendableDocument struct { + Vertically bool + Horizontally bool +} + +type WidgetLocationDocument struct { + Zone string + Section string + Area string +} + +type PluginConsumer struct { + Rows []*plugin.Plugin +} + +func (c *PluginConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc PluginDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + plugin, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, plugin) + return nil +} + +func NewPlugin(plugin *plugin.Plugin) (*PluginDocument, string) { + if plugin == nil { + return nil, "" + } + + extensions := plugin.Extensions() + extensionsDoc := make([]PluginExtensionDocument, 0, len(extensions)) + for _, e := range extensions { + extensionsDoc = append(extensionsDoc, PluginExtensionDocument{ + ID: string(e.ID()), + Type: string(e.Type()), + Name: e.Name(), + Description: e.Description(), + Icon: e.Icon(), + Schema: e.Schema().String(), + Visualizer: string(e.Visualizer()), + SingleOnly: e.SingleOnly(), + WidgetLayout: NewWidgetLayout(e.WidgetLayout()), + }) + } + + pid := plugin.ID().String() + return &PluginDocument{ + ID: pid, + Name: plugin.Name(), + Description: plugin.Description(), + Author: plugin.Author(), + RepositoryURL: plugin.RepositoryURL(), + Extensions: extensionsDoc, + Schema: plugin.Schema().StringRef(), + Scene: plugin.ID().Scene().StringRef(), + }, pid +} + +func (d *PluginDocument) Model() (*plugin.Plugin, error) { + if d == nil { + return nil, nil + } + + pid, err := id.PluginIDFrom(d.ID) + if err != nil { + return nil, err + } + + extensions := make([]*plugin.Extension, 0, len(d.Extensions)) + for _, e := range d.Extensions { + psid, err := id.PropertySchemaIDFrom(e.Schema) + if err != nil { + return nil, err + } + extension, err := plugin.NewExtension(). + ID(id.PluginExtensionID(e.ID)). + Type(plugin.ExtensionType(e.Type)). + Name(e.Name). 
+ Description(e.Description). + Icon(e.Icon). + SingleOnly(e.SingleOnly). + WidgetLayout(e.WidgetLayout.Model()). + Schema(psid). + Build() + if err != nil { + return nil, err + } + extensions = append(extensions, extension) + } + + return plugin.New(). + ID(pid). + Name(d.Name). + Description(d.Description). + Author(d.Author). + RepositoryURL(d.RepositoryURL). + Extensions(extensions). + Schema(id.PropertySchemaIDFromRef(d.Schema)). + Build() +} + +func NewWidgetLayout(l *plugin.WidgetLayout) *WidgetLayoutDocument { + if l == nil { + return nil + } + + return &WidgetLayoutDocument{ + Extendable: &WidgetExtendableDocument{ + Vertically: l.VerticallyExtendable(), + Horizontally: l.HorizontallyExtendable(), + }, + Extended: l.Extended(), + Floating: l.Floating(), + DefaultLocation: NewWidgetLocation(l.DefaultLocation()), + } +} + +func (d *WidgetLayoutDocument) Model() *plugin.WidgetLayout { + if d == nil { + return nil + } + + return plugin.NewWidgetLayout( + d.Extendable.Horizontally, + d.Extendable.Vertically, + d.Extended, + d.Floating, + d.DefaultLocation.Model(), + ).Ref() +} + +func NewWidgetLocation(l *plugin.WidgetLocation) *WidgetLocationDocument { + if l == nil { + return nil + } + + return &WidgetLocationDocument{ + Zone: string(l.Zone), + Section: string(l.Section), + Area: string(l.Area), + } +} + +func (d *WidgetLocationDocument) Model() *plugin.WidgetLocation { + if d == nil { + return nil + } + + return &plugin.WidgetLocation{ + Zone: plugin.WidgetZoneType(d.Zone), + Section: plugin.WidgetSectionType(d.Section), + Area: plugin.WidgetAreaType(d.Area), + } +} diff --git a/server/internal/infrastructure/mongo/mongodoc/project.go b/server/internal/infrastructure/mongo/mongodoc/project.go new file mode 100644 index 000000000..da36df7fe --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/project.go @@ -0,0 +1,123 @@ +package mongodoc + +import ( + "net/url" + "time" + + "go.mongodb.org/mongo-driver/bson" + + 
"github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/project" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +type ProjectDocument struct { + ID string + Archived bool + IsBasicAuthActive bool + BasicAuthUsername string + BasicAuthPassword string + UpdatedAt time.Time + PublishedAt time.Time + Name string + Description string + Alias string + ImageURL string + PublicTitle string + PublicDescription string + PublicImage string + PublicNoIndex bool + Team string + Visualizer string + PublishmentStatus string +} + +type ProjectConsumer struct { + Rows []*project.Project +} + +func (c *ProjectConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc ProjectDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + project, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, project) + return nil +} + +func NewProject(project *project.Project) (*ProjectDocument, string) { + pid := project.ID().String() + + imageURL := "" + if u := project.ImageURL(); u != nil { + imageURL = u.String() + } + + return &ProjectDocument{ + ID: pid, + Archived: project.IsArchived(), + IsBasicAuthActive: project.IsBasicAuthActive(), + BasicAuthUsername: project.BasicAuthUsername(), + BasicAuthPassword: project.BasicAuthPassword(), + UpdatedAt: project.UpdatedAt(), + PublishedAt: project.PublishedAt(), + Name: project.Name(), + Description: project.Description(), + Alias: project.Alias(), + ImageURL: imageURL, + PublicTitle: project.PublicTitle(), + PublicDescription: project.PublicDescription(), + PublicImage: project.PublicImage(), + PublicNoIndex: project.PublicNoIndex(), + Team: project.Team().String(), + Visualizer: string(project.Visualizer()), + PublishmentStatus: string(project.PublishmentStatus()), + }, pid +} + +func (d *ProjectDocument) Model() (*project.Project, error) { + pid, err := id.ProjectIDFrom(d.ID) + if err != nil { + return nil, err + } + tid, err := 
id.TeamIDFrom(d.Team) + if err != nil { + return nil, err + } + + var imageURL *url.URL + if d.ImageURL != "" { + if imageURL, err = url.Parse(d.ImageURL); err != nil { + imageURL = nil + } + } + + return project.New(). + ID(pid). + IsArchived(d.Archived). + IsBasicAuthActive(d.IsBasicAuthActive). + BasicAuthUsername(d.BasicAuthUsername). + BasicAuthPassword(d.BasicAuthPassword). + UpdatedAt(d.UpdatedAt). + PublishedAt(d.PublishedAt). + Name(d.Name). + Description(d.Description). + Alias(d.Alias). + ImageURL(imageURL). + PublicTitle(d.PublicTitle). + PublicDescription(d.PublicDescription). + PublicImage(d.PublicImage). + PublicNoIndex(d.PublicNoIndex). + Team(tid). + Visualizer(visualizer.Visualizer(d.Visualizer)). + PublishmentStatus(project.PublishmentStatus(d.PublishmentStatus)). + Build() +} diff --git a/server/internal/infrastructure/mongo/mongodoc/property.go b/server/internal/infrastructure/mongo/mongodoc/property.go new file mode 100644 index 000000000..05c5608a0 --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/property.go @@ -0,0 +1,317 @@ +package mongodoc + +import ( + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" +) + +const ( + typePropertyItemGroup = "group" + typePropertyItemGroupList = "grouplist" +) + +type PropertyDocument struct { + ID string + Scene string + Schema string `bson:",omitempty"` // compatibility + SchemaPlugin string + SchemaName string + Items []*PropertyItemDocument +} + +type PropertyFieldDocument struct { + Field string + Type string + Links []*PropertyLinkDocument + Value interface{} +} + +type PropertyLinkDocument struct { + Schema *string + Dataset *string + Field *string +} + +type PropertyItemDocument struct { + Type string + ID string + SchemaGroup string + Groups []*PropertyItemDocument + Fields []*PropertyFieldDocument +} + +type PropertyConsumer struct { + Rows 
[]*property.Property +} + +func (c *PropertyConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc PropertyDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + property, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, property) + return nil +} + +type PropertyBatchConsumer struct { + Size int + Callback func([]*property.Property) error + consumer *BatchConsumer +} + +func (c *PropertyBatchConsumer) Consume(raw bson.Raw) error { + if c.consumer == nil { + c.consumer = &BatchConsumer{ + Size: c.Size, + Callback: func(rows []bson.Raw) error { + properties := make([]*property.Property, 0, len(rows)) + + for _, r := range rows { + var doc PropertyDocument + if err := bson.Unmarshal(r, &doc); err != nil { + return err + } + property, err := doc.Model() + if err != nil { + return err + } + + properties = append(properties, property) + } + + return c.Callback(properties) + }, + } + } + + return c.consumer.Consume(raw) +} + +func newPropertyField(f *property.Field) *PropertyFieldDocument { + if f == nil { + return nil + } + + field := &PropertyFieldDocument{ + Field: string(f.Field()), + Type: string(f.Type()), + Value: f.Value().Interface(), + } + + if links := f.Links().Links(); links != nil { + field.Links = make([]*PropertyLinkDocument, 0, len(links)) + for _, l := range links { + field.Links = append(field.Links, &PropertyLinkDocument{ + Schema: l.DatasetSchema().StringRef(), + Dataset: l.Dataset().StringRef(), + Field: l.DatasetSchemaField().StringRef(), + }) + } + } + + return field +} + +func newPropertyItem(f property.Item) *PropertyItemDocument { + if f == nil { + return nil + } + + t := "" + var items []*PropertyItemDocument + var fields []*PropertyFieldDocument + + if g := property.ToGroup(f); g != nil { + t = typePropertyItemGroup + pfields := g.Fields(nil) + fields = make([]*PropertyFieldDocument, 0, len(pfields)) + for _, r := range pfields { + fields = append(fields, 
newPropertyField(r)) + } + } else if g := property.ToGroupList(f); g != nil { + t = typePropertyItemGroupList + pgroups := g.Groups() + items = make([]*PropertyItemDocument, 0, len(pgroups)) + for _, r := range pgroups { + items = append(items, newPropertyItem(r)) + } + } + + return &PropertyItemDocument{ + Type: t, + ID: f.ID().String(), + SchemaGroup: string(f.SchemaGroup()), + Groups: items, + Fields: fields, + } +} + +func NewProperty(property *property.Property) (*PropertyDocument, string) { + if property == nil { + return nil, "" + } + + pid := property.ID().String() + items := property.Items() + doc := PropertyDocument{ + ID: pid, + SchemaPlugin: property.Schema().Plugin().String(), + SchemaName: property.Schema().ID(), + Items: make([]*PropertyItemDocument, 0, len(items)), + Scene: property.Scene().String(), + } + for _, f := range items { + doc.Items = append(doc.Items, newPropertyItem(f)) + } + return &doc, pid +} + +func NewProperties(properties []*property.Property, f scene.IDList) ([]interface{}, []string) { + if properties == nil { + return nil, nil + } + + res := make([]interface{}, 0, len(properties)) + ids := make([]string, 0, len(properties)) + for _, d := range properties { + if d == nil || f != nil && !f.Has(d.Scene()) { + continue + } + r, id := NewProperty(d) + res = append(res, r) + ids = append(ids, id) + } + return res, ids +} + +func toModelPropertyField(f *PropertyFieldDocument) *property.Field { + if f == nil { + return nil + } + + var flinks *property.Links + if f.Links != nil { + links := make([]*property.Link, 0, len(f.Links)) + for _, l := range f.Links { + var link *property.Link + d := id.DatasetIDFromRef(l.Dataset) + ds := id.DatasetSchemaIDFromRef(l.Schema) + df := id.DatasetFieldIDFromRef(l.Field) + if d != nil && ds != nil && df != nil { + link = property.NewLink(*d, *ds, *df) + } else if ds != nil && df != nil { + link = property.NewLinkFieldOnly(*ds, *df) + } else { + continue + } + links = append(links, link) + } + flinks = 
property.NewLinks(links) + } + + vt := property.ValueType(f.Type) + field := property.NewField(property.FieldID(f.Field)). + Value(property.NewOptionalValue(vt, toModelPropertyValue(f.Value, f.Type))). + Links(flinks). + Build() + + return field +} + +func toModelPropertyItem(f *PropertyItemDocument) (property.Item, error) { + if f == nil { + return nil, nil + } + + var i property.Item + var err error + var iid id.PropertyItemID + + iid, err = id.PropertyItemIDFrom(f.ID) + if err != nil { + return nil, err + } + gid := id.PropertySchemaGroupID(f.SchemaGroup) + + if f.Type == typePropertyItemGroup { + fields := make([]*property.Field, 0, len(f.Fields)) + for _, i := range f.Fields { + fields = append(fields, toModelPropertyField(i)) + } + + i, err = property.NewGroup(). + ID(iid). + SchemaGroup(gid). + Fields(fields). + Build() + } else if f.Type == typePropertyItemGroupList { + items := make([]*property.Group, 0, len(f.Groups)) + for _, i := range f.Groups { + i2, err := toModelPropertyItem(i) + if err != nil { + return nil, err + } + if i3 := property.ToGroup(i2); i3 != nil { + items = append(items, i3) + } + } + + i, err = property.NewGroupList(). + ID(iid). + SchemaGroup(gid). + Groups(items). + Build() + } + + return i, err +} + +func (doc *PropertyDocument) Model() (*property.Property, error) { + if doc == nil { + return nil, nil + } + + pid, err := id.PropertyIDFrom(doc.ID) + if err != nil { + return nil, err + } + sid, err := id.SceneIDFrom(doc.Scene) + if err != nil { + return nil, err + } + pl, err := id.PluginIDFrom(doc.SchemaPlugin) + if err != nil { + return nil, err + } + + items := make([]property.Item, 0, len(doc.Items)) + for _, f := range doc.Items { + i, err := toModelPropertyItem(f) + if err != nil { + return nil, err + } + items = append(items, i) + } + + return property.New(). + ID(pid). + Scene(sid). + Schema(id.NewPropertySchemaID(pl, doc.SchemaName)). + Items(items). 
+ Build() +} + +func toModelPropertyValue(v interface{}, t string) *property.Value { + return property.ValueType(t).ValueFrom(convertDToM(v)) +} diff --git a/server/internal/infrastructure/mongo/mongodoc/property_schema.go b/server/internal/infrastructure/mongo/mongodoc/property_schema.go new file mode 100644 index 000000000..f2d0d357f --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/property_schema.go @@ -0,0 +1,314 @@ +package mongodoc + +import ( + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type PropertySchemaDocument struct { + ID string + Scene *string `bson:",omitempty"` + Version int + Groups []*PropertySchemaGroupDocument + LinkableFields *PropertyLinkableFieldsDocument +} + +type PropertySchemaGroupDocument struct { + ID string + Fields []*PropertySchemaFieldDocument + List bool + IsAvailableIf *PropertyConditonDocument + Title map[string]string +} + +type PropertySchemaFieldDocument struct { + ID string + Type string + Name map[string]string + Description map[string]string + Prefix string + Suffix string + DefaultValue interface{} + UI *string + Min *float64 + Max *float64 + Choices []PropertySchemaFieldChoiceDocument +} + +type PropertySchemaFieldChoiceDocument struct { + Key string + Label map[string]string +} + +type PropertyLinkableFieldsDocument struct { + LatLng *PropertySchemaFieldPointerDocument + URL *PropertySchemaFieldPointerDocument +} + +type PropertySchemaFieldPointerDocument struct { + SchemaGroupID string + FieldID string +} + +type PropertyConditonDocument struct { + Field string + Type string + Value interface{} +} + +type PropertySchemaConsumer struct { + Rows []*property.Schema +} + +func (c *PropertySchemaConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc PropertySchemaDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return 
err + } + propertySchema, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, propertySchema) + return nil +} + +func NewPropertySchemaField(f *property.SchemaField) *PropertySchemaFieldDocument { + if f == nil { + return nil + } + + field := &PropertySchemaFieldDocument{ + ID: string(f.ID()), + Name: f.Title(), + Suffix: f.Suffix(), + Prefix: f.Prefix(), + Description: f.Description(), + Type: string(f.Type()), + DefaultValue: f.DefaultValue().Value(), + UI: f.UI().StringRef(), + Min: f.Min(), + Max: f.Max(), + } + if choices := f.Choices(); choices != nil { + field.Choices = make([]PropertySchemaFieldChoiceDocument, 0, len(choices)) + for _, c := range choices { + field.Choices = append(field.Choices, PropertySchemaFieldChoiceDocument{ + Key: c.Key, + Label: c.Title, + }) + } + } + return field +} + +func NewPropertySchema(m *property.Schema) (*PropertySchemaDocument, string) { + if m == nil { + return nil, "" + } + + pgroups := m.Groups().Groups() + groups := make([]*PropertySchemaGroupDocument, 0, len(pgroups)) + for _, f := range pgroups { + groups = append(groups, newPropertySchemaGroup(f)) + } + + id := m.ID().String() + return &PropertySchemaDocument{ + ID: id, + Scene: m.Scene().StringRef(), + Version: m.Version(), + Groups: groups, + LinkableFields: ToDocPropertyLinkableFields(m.LinkableFields()), + }, id +} + +func NewPropertySchemas(ps []*property.Schema, f scene.IDList) ([]interface{}, []string) { + if ps == nil { + return nil, nil + } + + res := make([]interface{}, 0, len(ps)) + ids := make([]string, 0, len(ps)) + for _, d := range ps { + if d == nil { + continue + } + if s := d.Scene(); s != nil && f != nil && !f.Has(*s) { + continue + } + r, id := NewPropertySchema(d) + res = append(res, r) + ids = append(ids, id) + } + return res, ids +} + +func ToModelPropertySchemaField(f *PropertySchemaFieldDocument) (*property.SchemaField, error) { + if f == nil { + return nil, nil + } + + var choices []property.SchemaFieldChoice + 
if f.Choices != nil { + choices = make([]property.SchemaFieldChoice, 0, len(f.Choices)) + for _, c := range f.Choices { + choices = append(choices, property.SchemaFieldChoice{ + Key: c.Key, + Title: c.Label, + }) + } + } + + vt := property.ValueType(f.Type) + return property.NewSchemaField(). + ID(id.PropertyFieldID(f.ID)). + Type(vt). + Name(f.Name). + Description(f.Description). + Prefix(f.Prefix). + Suffix(f.Suffix). + DefaultValue(vt.ValueFrom(f.DefaultValue)). + UIRef(property.SchemaFieldUIFromRef(f.UI)). + MinRef(f.Min). + MaxRef(f.Max). + Choices(choices). + Build() +} + +func (doc *PropertySchemaDocument) Model() (*property.Schema, error) { + if doc == nil { + return nil, nil + } + + pid, err := id.PropertySchemaIDFrom(doc.ID) + if err != nil { + return nil, err + } + + groups := make([]*property.SchemaGroup, 0, len(doc.Groups)) + for _, g := range doc.Groups { + g2, err := g.Model() + if err != nil { + return nil, err + } + groups = append(groups, g2) + } + + return property.NewSchema(). + ID(pid). + Version(doc.Version). + Groups(property.NewSchemaGroupList(groups)). + LinkableFields(toModelPropertyLinkableFields(doc.LinkableFields)). 
+ Build() +} + +func newPropertyCondition(c *property.Condition) *PropertyConditonDocument { + if c == nil { + return nil + } + + return &PropertyConditonDocument{ + Field: string(c.Field), + Type: string(c.Value.Type()), + Value: c.Value.Interface(), + } +} + +func toModelPropertyCondition(d *PropertyConditonDocument) *property.Condition { + if d == nil { + return nil + } + + return &property.Condition{ + Field: id.PropertyFieldID(d.Field), + Value: toModelPropertyValue(d.Value, d.Type), + } +} + +func newPropertySchemaGroup(p *property.SchemaGroup) *PropertySchemaGroupDocument { + if p == nil { + return nil + } + + pfields := p.Fields() + fields := make([]*PropertySchemaFieldDocument, 0, len(pfields)) + for _, f := range pfields { + fields = append(fields, NewPropertySchemaField(f)) + } + + return &PropertySchemaGroupDocument{ + ID: string(p.ID()), + List: p.IsList(), + IsAvailableIf: newPropertyCondition(p.IsAvailableIf()), + Title: p.Title(), + Fields: fields, + } +} + +func (d *PropertySchemaGroupDocument) Model() (*property.SchemaGroup, error) { + if d == nil { + return nil, nil + } + + fields := make([]*property.SchemaField, 0, len(d.Fields)) + for _, f := range d.Fields { + field, err := ToModelPropertySchemaField(f) + if err != nil { + return nil, err + } + fields = append(fields, field) + } + + return property.NewSchemaGroup(). + ID(id.PropertySchemaGroupID(d.ID)). + IsList(d.List). + Title(d.Title). + IsAvailableIf(toModelPropertyCondition(d.IsAvailableIf)). + Fields(fields). 
+ Build() +} + +func ToDocPropertyLinkableFields(l property.LinkableFields) *PropertyLinkableFieldsDocument { + return &PropertyLinkableFieldsDocument{ + LatLng: newDocPropertyPointer(l.LatLng), + URL: newDocPropertyPointer(l.URL), + } +} + +func toModelPropertyLinkableFields(l *PropertyLinkableFieldsDocument) property.LinkableFields { + if l == nil { + return property.LinkableFields{} + } + return property.LinkableFields{ + LatLng: toModelPropertyPointer(l.LatLng), + URL: toModelPropertyPointer(l.URL), + } +} + +func toModelPropertyPointer(p *PropertySchemaFieldPointerDocument) *property.SchemaFieldPointer { + if p == nil { + return nil + } + return &property.SchemaFieldPointer{ + SchemaGroup: property.SchemaGroupID(p.SchemaGroupID), + Field: property.FieldID(p.FieldID), + } +} + +func newDocPropertyPointer(p *property.SchemaFieldPointer) *PropertySchemaFieldPointerDocument { + if p == nil { + return nil + } + return &PropertySchemaFieldPointerDocument{ + SchemaGroupID: p.SchemaGroup.String(), + FieldID: p.Field.String(), + } +} diff --git a/server/internal/infrastructure/mongo/mongodoc/scene.go b/server/internal/infrastructure/mongo/mongodoc/scene.go new file mode 100644 index 000000000..98e4c50d6 --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/scene.go @@ -0,0 +1,276 @@ +package mongodoc + +import ( + "errors" + "time" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type SceneDocument struct { + ID string + Project string + Team string + RootLayer string + Widgets []SceneWidgetDocument + AlignSystem *WidgetAlignSystemDocument + Plugins []ScenePluginDocument + UpdateAt time.Time + Property string + Clusters []SceneClusterDocument +} + +type SceneWidgetDocument struct { + ID string + Plugin string + Extension string + Property string + Enabled bool + Extended bool +} + +type ScenePluginDocument struct { + Plugin string + Property *string +} + +type 
SceneClusterDocument struct { + ID string + Name string + Property string +} + +type SceneConsumer struct { + Rows scene.List +} + +func (c *SceneConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc SceneDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + scene, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, scene) + return nil +} + +type SceneIDDocument struct { + ID string +} + +type SceneIDConsumer struct { + Rows []id.SceneID +} + +func (c *SceneIDConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc SceneIDDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + scene, err := id.SceneIDFrom(doc.ID) + if err != nil { + return err + } + c.Rows = append(c.Rows, scene) + return nil +} + +func NewScene(scene *scene.Scene) (*SceneDocument, string) { + widgets := scene.Widgets().Widgets() + plugins := scene.Plugins().Plugins() + clusters := scene.Clusters().Clusters() + + widgetsDoc := make([]SceneWidgetDocument, 0, len(widgets)) + pluginsDoc := make([]ScenePluginDocument, 0, len(plugins)) + clsuterDoc := make([]SceneClusterDocument, 0, len(clusters)) + for _, w := range widgets { + widgetsDoc = append(widgetsDoc, SceneWidgetDocument{ + ID: w.ID().String(), + Plugin: w.Plugin().String(), + Extension: string(w.Extension()), + Property: w.Property().String(), + Enabled: w.Enabled(), + Extended: w.Extended(), + }) + } + + for _, sp := range plugins { + pluginsDoc = append(pluginsDoc, ScenePluginDocument{ + Plugin: sp.Plugin().String(), + Property: sp.Property().StringRef(), + }) + } + + for _, cl := range clusters { + clsuterDoc = append(clsuterDoc, SceneClusterDocument{ + ID: cl.ID().String(), + Name: cl.Name(), + Property: cl.Property().String(), + }) + } + + id := scene.ID().String() + return &SceneDocument{ + ID: id, + Project: scene.Project().String(), + Team: scene.Team().String(), + RootLayer: 
scene.RootLayer().String(), + Widgets: widgetsDoc, + Plugins: pluginsDoc, + AlignSystem: NewWidgetAlignSystem(scene.Widgets().Alignment()), + UpdateAt: scene.UpdatedAt(), + Property: scene.Property().String(), + Clusters: clsuterDoc, + }, id +} + +func (d *SceneDocument) Model() (*scene.Scene, error) { + sid, err := id.SceneIDFrom(d.ID) + if err != nil { + return nil, err + } + projectID, err := id.ProjectIDFrom(d.Project) + if err != nil { + return nil, err + } + prid, err := id.PropertyIDFrom(d.Property) + if err != nil { + return nil, err + } + tid, err := id.TeamIDFrom(d.Team) + if err != nil { + return nil, err + } + lid, err := id.LayerIDFrom(d.RootLayer) + if err != nil { + return nil, err + } + + ws := make([]*scene.Widget, 0, len(d.Widgets)) + ps := make([]*scene.Plugin, 0, len(d.Plugins)) + clusters := make([]*scene.Cluster, 0, len(d.Clusters)) + + for _, w := range d.Widgets { + wid, err := id.WidgetIDFrom(w.ID) + if err != nil { + return nil, err + } + pid, err := id.PluginIDFrom(w.Plugin) + if err != nil { + return nil, err + } + prid, err := id.PropertyIDFrom(w.Property) + if err != nil { + return nil, err + } + sw, err := scene.NewWidget( + wid, + pid, + id.PluginExtensionID(w.Extension), + prid, + w.Enabled, + w.Extended, + ) + if err != nil { + return nil, err + } + ws = append(ws, sw) + } + + for _, p := range d.Plugins { + pid, err := id.PluginIDFrom(p.Plugin) + if err != nil { + return nil, err + } + ps = append(ps, scene.NewPlugin(pid, id.PropertyIDFromRef(p.Property))) + } + + for _, c := range d.Clusters { + cid, err := id.ClusterIDFrom(c.ID) + if err != nil { + return nil, err + } + pid, err := id.PropertyIDFrom(c.Property) + if err != nil { + return nil, err + } + cluster, err := scene.NewCluster(cid, c.Name, pid) + if err != nil { + return nil, err + } + clusters = append(clusters, cluster) + } + + cl := scene.NewClusterListFrom(clusters) + + return scene.New(). + ID(sid). + Project(projectID). + Team(tid). + RootLayer(lid). 
+ Clusters(cl). + Widgets(scene.NewWidgets(ws, d.AlignSystem.Model())). + Plugins(scene.NewPlugins(ps)). + UpdatedAt(d.UpdateAt). + Property(prid). + Build() +} + +type SceneLockConsumer struct { + Rows []scene.LockMode +} + +type SceneLockDocument struct { + Scene string + Lock string +} + +func (c *SceneLockConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc SceneLockDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + _, sceneLock, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, sceneLock) + return nil +} + +func NewSceneLock(sceneID id.SceneID, lock scene.LockMode) *SceneLockDocument { + return &SceneLockDocument{ + Scene: sceneID.String(), + Lock: string(lock), + } +} + +func (d *SceneLockDocument) Model() (id.SceneID, scene.LockMode, error) { + sceneID, err := id.SceneIDFrom(d.Scene) + if err != nil { + return sceneID, scene.LockMode(""), err + } + sceneLock, ok := scene.LockMode(d.Lock).Validate() + if !ok { + return sceneID, sceneLock, errors.New("invalid scene lock mode") + } + return sceneID, sceneLock, nil +} diff --git a/server/internal/infrastructure/mongo/mongodoc/scene_align.go b/server/internal/infrastructure/mongo/mongodoc/scene_align.go new file mode 100644 index 000000000..16677e248 --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/scene_align.go @@ -0,0 +1,146 @@ +package mongodoc + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type WidgetAlignSystemDocument struct { + Inner *WidgetZoneDocument + Outer *WidgetZoneDocument +} +type WidgetZoneDocument struct { + Left *WidgetSectionDocument + Center *WidgetSectionDocument + Right *WidgetSectionDocument +} + +type WidgetSectionDocument struct { + Top *WidgetAreaDocument + Middle *WidgetAreaDocument + Bottom *WidgetAreaDocument +} + +type WidgetAreaDocument struct { + WidgetIDs []string + Align string +} + +func 
NewWidgetAlignSystem(was *scene.WidgetAlignSystem) *WidgetAlignSystemDocument { + if was == nil { + return nil + } + + d := &WidgetAlignSystemDocument{ + Inner: NewWidgetZone(was.Zone(scene.WidgetZoneInner)), + Outer: NewWidgetZone(was.Zone(scene.WidgetZoneOuter)), + } + + if d.Inner == nil && d.Outer == nil { + return nil + } + return d +} + +func NewWidgetZone(z *scene.WidgetZone) *WidgetZoneDocument { + if z == nil { + return nil + } + + d := &WidgetZoneDocument{ + Left: NewWidgetSection(z.Section(scene.WidgetSectionLeft)), + Center: NewWidgetSection(z.Section(scene.WidgetSectionCenter)), + Right: NewWidgetSection(z.Section(scene.WidgetSectionRight)), + } + + if d.Left == nil && d.Center == nil && d.Right == nil { + return nil + } + return d +} + +func NewWidgetSection(s *scene.WidgetSection) *WidgetSectionDocument { + if s == nil { + return nil + } + + d := &WidgetSectionDocument{ + Top: NewWidgetArea(s.Area(scene.WidgetAreaTop)), + Middle: NewWidgetArea(s.Area(scene.WidgetAreaMiddle)), + Bottom: NewWidgetArea(s.Area(scene.WidgetAreaBottom)), + } + + if d.Top == nil && d.Middle == nil && d.Bottom == nil { + return nil + } + return d +} + +func NewWidgetArea(a *scene.WidgetArea) *WidgetAreaDocument { + if a == nil { + return nil + } + + return &WidgetAreaDocument{ + WidgetIDs: a.WidgetIDs().Strings(), + Align: string(a.Alignment()), + } +} + +func (d *WidgetAlignSystemDocument) Model() *scene.WidgetAlignSystem { + if d == nil { + return nil + } + + was := scene.NewWidgetAlignSystem() + was.SetZone(scene.WidgetZoneInner, d.Inner.Model()) + was.SetZone(scene.WidgetZoneOuter, d.Outer.Model()) + return was +} + +func (d *WidgetZoneDocument) Model() *scene.WidgetZone { + if d == nil { + return nil + } + + wz := scene.NewWidgetZone() + wz.SetSection(scene.WidgetSectionLeft, d.Left.Model()) + wz.SetSection(scene.WidgetSectionCenter, d.Center.Model()) + wz.SetSection(scene.WidgetSectionRight, d.Right.Model()) + return wz +} + +func (d *WidgetSectionDocument) Model() 
*scene.WidgetSection { + if d == nil { + return nil + } + + ws := scene.NewWidgetSection() + ws.SetArea(scene.WidgetAreaTop, d.Top.Model()) + ws.SetArea(scene.WidgetAreaMiddle, d.Middle.Model()) + ws.SetArea(scene.WidgetAreaBottom, d.Bottom.Model()) + return ws +} + +func (a *WidgetAreaDocument) Model() *scene.WidgetArea { + if a == nil { + return nil + } + + return scene.NewWidgetArea(stringsToWidgetIDs(a.WidgetIDs), scene.WidgetAlignType(a.Align)) +} + +func stringsToWidgetIDs(wids []string) []id.WidgetID { + if wids == nil { + return nil + } + var docids []id.WidgetID + for _, wid := range wids { + nid, err := id.WidgetIDFrom(wid) + if err != nil { + continue + } + docids = append(docids, nid) + } + return docids +} diff --git a/server/internal/infrastructure/mongo/mongodoc/tag.go b/server/internal/infrastructure/mongo/mongodoc/tag.go new file mode 100644 index 000000000..e1f737cb6 --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/tag.go @@ -0,0 +1,181 @@ +package mongodoc + +import ( + "errors" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/tag" +) + +type TagDocument struct { + ID string + Label string + Scene string + Item *TagItemDocument + Group *TagGroupDocument +} + +type TagItemDocument struct { + Parent *string + LinkedDatasetFieldID *string + LinkedDatasetID *string + LinkedDatasetSchemaID *string +} + +type TagGroupDocument struct { + Tags []string +} + +type TagConsumer struct { + Rows []*tag.Tag + GroupRows []*tag.Group + ItemRows []*tag.Item +} + +func (c *TagConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc TagDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + ti, tg, err := doc.Model() + if err != nil { + return err + } + if ti != nil { + var t tag.Tag = ti + c.Rows = append(c.Rows, &t) + c.ItemRows = append(c.ItemRows, ti) + } + if tg 
!= nil { + var t tag.Tag = tg + c.Rows = append(c.Rows, &t) + c.GroupRows = append(c.GroupRows, tg) + } + return nil +} + +func NewTag(t tag.Tag) (*TagDocument, string) { + var group *TagGroupDocument + var item *TagItemDocument + if tg := tag.GroupFrom(t); tg != nil { + group = &TagGroupDocument{ + Tags: tg.Tags().Strings(), + } + } + + if ti := tag.ItemFrom(t); ti != nil { + item = &TagItemDocument{ + Parent: ti.Parent().StringRef(), + LinkedDatasetFieldID: ti.LinkedDatasetFieldID().StringRef(), + LinkedDatasetID: ti.LinkedDatasetID().StringRef(), + LinkedDatasetSchemaID: ti.LinkedDatasetSchemaID().StringRef(), + } + } + + tid := t.ID().String() + return &TagDocument{ + ID: tid, + Label: t.Label(), + Scene: t.Scene().String(), + Item: item, + Group: group, + }, tid +} + +func NewTags(tags []*tag.Tag, f scene.IDList) ([]interface{}, []string) { + res := make([]interface{}, 0, len(tags)) + ids := make([]string, 0, len(tags)) + for _, d := range tags { + if d == nil { + continue + } + d2 := *d + if f != nil && !f.Has(d2.Scene()) { + continue + } + r, tid := NewTag(d2) + res = append(res, r) + ids = append(ids, tid) + } + return res, ids +} + +func (d *TagDocument) Model() (*tag.Item, *tag.Group, error) { + if d.Item != nil { + ti, err := d.ModelItem() + if err != nil { + return nil, nil, err + } + return ti, nil, nil + } + + if d.Group != nil { + tg, err := d.ModelGroup() + if err != nil { + return nil, nil, err + } + return nil, tg, nil + } + + return nil, nil, errors.New("invalid tag") +} + +func (d *TagDocument) ModelItem() (*tag.Item, error) { + if d.Item == nil { + return nil, nil + } + + tid, err := id.TagIDFrom(d.ID) + if err != nil { + return nil, err + } + sid, err := id.SceneIDFrom(d.Scene) + if err != nil { + return nil, err + } + + return tag.NewItem(). + ID(tid). + Label(d.Label). + Scene(sid). + Parent(id.TagIDFromRef(d.Item.Parent)). + LinkedDatasetSchemaID(id.DatasetSchemaIDFromRef(d.Item.LinkedDatasetSchemaID)). 
+ LinkedDatasetID(id.DatasetIDFromRef(d.Item.LinkedDatasetID)). + LinkedDatasetFieldID(id.DatasetFieldIDFromRef(d.Item.LinkedDatasetFieldID)). + Build() +} + +func (d *TagDocument) ModelGroup() (*tag.Group, error) { + if d.Group == nil { + return nil, nil + } + + tid, err := id.TagIDFrom(d.ID) + if err != nil { + return nil, err + } + + sid, err := id.SceneIDFrom(d.Scene) + if err != nil { + return nil, err + } + + tags, err := id.TagIDListFrom(d.Group.Tags) + if err != nil { + return nil, err + } + + return tag.NewGroup(). + ID(tid). + Label(d.Label). + Scene(sid). + Tags(tags). + Build() +} diff --git a/server/internal/infrastructure/mongo/mongodoc/tag_test.go b/server/internal/infrastructure/mongo/mongodoc/tag_test.go new file mode 100644 index 000000000..9101c9fe3 --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/tag_test.go @@ -0,0 +1,540 @@ +package mongodoc + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/tag" + "github.com/stretchr/testify/assert" + "go.mongodb.org/mongo-driver/bson" +) + +func TestNewTag(t *testing.T) { + sid := id.NewSceneID() + dssid := id.NewDatasetSchemaID() + dsid := id.NewDatasetID() + dssfid := id.NewDatasetFieldID() + ti, _ := tag.NewItem(). + NewID(). + Label("Item"). + Scene(sid). + LinkedDatasetFieldID(dssfid.Ref()). + LinkedDatasetID(dsid.Ref()). + LinkedDatasetSchemaID(dssid.Ref()). + Build() + tg, _ := tag.NewGroup(). + NewID(). + Label("group"). + Tags(tag.IDList{ti.ID()}). + Scene(sid). 
+ Build() + type args struct { + t tag.Tag + } + + tests := []struct { + name string + args args + want *TagDocument + want1 string + }{ + { + name: "New tag group", + args: args{ + t: tg, + }, + want: &TagDocument{ + ID: tg.ID().String(), + Label: "group", + Scene: sid.String(), + Item: nil, + Group: &TagGroupDocument{Tags: []string{ti.ID().String()}}, + }, + want1: tg.ID().String(), + }, + { + name: "New tag item", + args: args{ + t: ti, + }, + want: &TagDocument{ + ID: ti.ID().String(), + Label: "Item", + Scene: sid.String(), + Item: &TagItemDocument{ + LinkedDatasetFieldID: dssfid.StringRef(), + LinkedDatasetID: dsid.StringRef(), + LinkedDatasetSchemaID: dssid.StringRef(), + }, + Group: nil, + }, + want1: ti.ID().String(), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + got, got1 := NewTag(tc.args.t) + assert.Equal(t, tc.want1, got1) + assert.Equal(t, tc.want, got) + }) + } +} + +func TestNewTags(t *testing.T) { + sid := id.NewSceneID() + ti, _ := tag.NewItem(). + NewID(). + Label("Item"). + Scene(sid). + Build() + tg, _ := tag.NewGroup(). + NewID(). + Label("group"). + Tags(id.TagIDList{ti.ID()}). + Scene(sid). 
+ Build() + tgi := tag.Tag(tg) + + type args struct { + tags []*tag.Tag + f scene.IDList + } + + tests := []struct { + name string + args args + want []interface{} + want1 []string + }{ + { + name: "new tags", + args: args{ + tags: []*tag.Tag{ + &tgi, + }, + }, + want: []interface{}{ + &TagDocument{ + ID: tg.ID().String(), + Label: "group", + Scene: sid.String(), + Item: nil, + Group: &TagGroupDocument{Tags: []string{ti.ID().String()}}, + }, + }, + want1: []string{tgi.ID().String()}, + }, + { + name: "filtered tags 1", + args: args{ + tags: []*tag.Tag{ + &tgi, + }, + f: scene.IDList{tgi.Scene()}, + }, + want: []interface{}{ + &TagDocument{ + ID: tg.ID().String(), + Label: "group", + Scene: sid.String(), + Item: nil, + Group: &TagGroupDocument{Tags: []string{ti.ID().String()}}, + }, + }, + want1: []string{tgi.ID().String()}, + }, + { + name: "filtered tags 2", + args: args{ + tags: []*tag.Tag{ + &tgi, + }, + f: scene.IDList{}, + }, + want: []interface{}{}, + want1: []string{}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + got, got1 := NewTags(tc.args.tags, tc.args.f) + assert.Equal(t, tc.want, got) + assert.Equal(t, tc.want1, got1) + }) + } +} + +func TestFuncConsumer_Consume(t *testing.T) { + sid := id.NewSceneID() + tg, _ := tag.NewGroup(). + NewID(). + Label("group"). + Scene(sid). + Build() + ti, _ := tag.NewItem(). + NewID(). + Label("group"). + Scene(sid). 
+ Build() + doc, _ := NewTag(tg) + doc1, _ := NewTag(ti) + r, _ := bson.Marshal(doc) + r1, _ := bson.Marshal(doc1) + type fields struct { + Rows []*tag.Tag + GroupRows []*tag.Group + ItemRows []*tag.Item + } + type args struct { + raw bson.Raw + } + + tests := []struct { + name string + fields fields + args args + wantErr bool + }{ + { + name: "nil row", + fields: fields{ + Rows: nil, + GroupRows: nil, + ItemRows: nil, + }, + args: args{ + raw: nil, + }, + wantErr: false, + }, + { + name: "consume tag group", + fields: fields{ + Rows: nil, + GroupRows: nil, + ItemRows: nil, + }, + args: args{ + raw: r, + }, + wantErr: false, + }, + { + name: "consume tag item", + fields: fields{ + Rows: nil, + GroupRows: nil, + ItemRows: nil, + }, + args: args{ + raw: r1, + }, + wantErr: false, + }, + { + name: "fail: unmarshal error", + fields: fields{ + Rows: nil, + GroupRows: nil, + ItemRows: nil, + }, + args: args{ + raw: []byte{}, + }, + wantErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + c := &TagConsumer{ + Rows: tc.fields.Rows, + GroupRows: tc.fields.GroupRows, + ItemRows: tc.fields.ItemRows, + } + + if err := c.Consume(tc.args.raw); tc.wantErr { + assert.Error(t, err) + } + }) + } +} + +func TestTagDocument_Model(t *testing.T) { + sid := id.NewSceneID() + dssid := id.NewDatasetSchemaID() + dsid := id.NewDatasetID() + dssfid := id.NewDatasetFieldID() + ti, _ := tag.NewItem(). + NewID(). + Label("Item"). + Scene(sid). + LinkedDatasetFieldID(dssfid.Ref()). + LinkedDatasetID(dsid.Ref()). + LinkedDatasetSchemaID(dssid.Ref()). + Build() + tg, _ := tag.NewGroup(). + NewID(). + Label("group"). + Tags(tag.IDList{ti.ID()}). + Scene(sid). 
+ Build() + type fields struct { + ID string + Label string + Scene string + Item *TagItemDocument + Group *TagGroupDocument + } + + tests := []struct { + name string + fields fields + want *tag.Item + want1 *tag.Group + wantErr bool + }{ + { + name: "item model", + fields: fields{ + ID: ti.ID().String(), + Label: "Item", + Scene: sid.String(), + Item: &TagItemDocument{ + LinkedDatasetFieldID: dssfid.StringRef(), + LinkedDatasetID: dsid.StringRef(), + LinkedDatasetSchemaID: dssid.StringRef(), + }, + Group: nil, + }, + want: ti, + want1: nil, + wantErr: false, + }, + { + name: "group model", + fields: fields{ + ID: tg.ID().String(), + Label: "group", + Scene: sid.String(), + Item: nil, + Group: &TagGroupDocument{Tags: []string{ti.ID().String()}}, + }, + want: nil, + want1: tg, + wantErr: false, + }, + { + name: "fail: invalid tag", + fields: fields{}, + want: nil, + want1: nil, + wantErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + d := &TagDocument{ + ID: tc.fields.ID, + Label: tc.fields.Label, + Scene: tc.fields.Scene, + Item: tc.fields.Item, + Group: tc.fields.Group, + } + got, got1, err := d.Model() + if tc.wantErr { + assert.Error(t, err) + } else { + assert.Equal(t, tc.want, got) + assert.Equal(t, tc.want1, got1) + } + }) + } +} + +func TestTagDocument_ModelGroup(t *testing.T) { + sid := id.NewSceneID() + ti, _ := tag.NewItem(). + NewID(). + Label("Item"). + Scene(sid). + Build() + tg, _ := tag.NewGroup(). + NewID(). + Label("group"). + Tags(tag.IDList{ti.ID()}). + Scene(sid). 
+ Build() + type fields struct { + ID string + Label string + Scene string + Item *TagItemDocument + Group *TagGroupDocument + } + + tests := []struct { + name string + fields fields + want *tag.Group + wantErr bool + }{ + { + name: "invalid id", + fields: fields{ + ID: "xxx", + Group: &TagGroupDocument{}, + }, + want: nil, + wantErr: true, + }, + { + name: "invalid id", + fields: fields{ + ID: id.NewTagID().String(), + Scene: "xxx", + Group: &TagGroupDocument{}, + }, + want: nil, + wantErr: true, + }, + { + name: "invalid item id", + fields: fields{ + ID: id.NewTagID().String(), + Scene: id.NewSceneID().String(), + Item: nil, + Group: &TagGroupDocument{Tags: []string{"xxx"}}, + }, + want: nil, + wantErr: true, + }, + { + name: "pass", + fields: fields{ + ID: tg.ID().String(), + Label: "group", + Scene: sid.String(), + Item: nil, + Group: &TagGroupDocument{Tags: []string{ti.ID().String()}}, + }, + want: tg, + wantErr: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + d := &TagDocument{ + ID: tc.fields.ID, + Label: tc.fields.Label, + Scene: tc.fields.Scene, + Item: tc.fields.Item, + Group: tc.fields.Group, + } + got, err := d.ModelGroup() + if tc.wantErr { + assert.Error(t, err) + } + assert.Equal(t, tc.want, got) + }) + } +} + +func TestTagDocument_ModelItem(t *testing.T) { + sid := id.NewSceneID() + dssid := id.NewDatasetSchemaID() + dsid := id.NewDatasetID() + dssfid := id.NewDatasetFieldID() + ti, _ := tag.NewItem(). + NewID(). + Label("Item"). + Scene(sid). + LinkedDatasetFieldID(dssfid.Ref()). + LinkedDatasetID(dsid.Ref()). + LinkedDatasetSchemaID(dssid.Ref()). 
+ Build() + type fields struct { + ID string + Label string + Scene string + Item *TagItemDocument + Group *TagGroupDocument + } + + tests := []struct { + name string + fields fields + want *tag.Item + wantErr bool + }{ + { + name: "invalid id", + fields: fields{ + ID: "xxx", + Item: &TagItemDocument{}, + }, + want: nil, + wantErr: true, + }, + { + name: "invalid id", + fields: fields{ + ID: id.NewTagID().String(), + Scene: "xxx", + Item: &TagItemDocument{}, + }, + want: nil, + wantErr: true, + }, + { + name: "pass", + fields: fields{ + ID: ti.ID().String(), + Label: ti.Label(), + Scene: ti.Scene().String(), + Item: &TagItemDocument{ + LinkedDatasetFieldID: dssfid.StringRef(), + LinkedDatasetID: dsid.StringRef(), + LinkedDatasetSchemaID: dssid.StringRef(), + }, + Group: nil, + }, + want: ti, + wantErr: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + d := &TagDocument{ + ID: tc.fields.ID, + Label: tc.fields.Label, + Scene: tc.fields.Scene, + Item: tc.fields.Item, + Group: tc.fields.Group, + } + got, err := d.ModelItem() + if tc.wantErr { + assert.Error(t, err) + } + assert.Equal(t, tc.want, got) + }) + } +} diff --git a/server/internal/infrastructure/mongo/mongodoc/team.go b/server/internal/infrastructure/mongo/mongodoc/team.go new file mode 100644 index 000000000..f440aaa1d --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/team.go @@ -0,0 +1,93 @@ +package mongodoc + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" + "go.mongodb.org/mongo-driver/bson" +) + +type TeamMemberDocument struct { + Role string +} + +type TeamDocument struct { + ID string + Name string + Members map[string]TeamMemberDocument + Personal bool +} + +type TeamConsumer struct { + Rows user.TeamList +} + +func (c *TeamConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc TeamDocument + if err := bson.Unmarshal(raw, &doc); err != 
nil { + return err + } + project, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, project) + return nil +} + +func NewTeam(team *user.Team) (*TeamDocument, string) { + membersDoc := map[string]TeamMemberDocument{} + for user, r := range team.Members().Members() { + membersDoc[user.String()] = TeamMemberDocument{ + Role: string(r), + } + } + id := team.ID().String() + return &TeamDocument{ + ID: id, + Name: team.Name(), + Members: membersDoc, + Personal: team.IsPersonal(), + }, id +} + +func (d *TeamDocument) Model() (*user.Team, error) { + tid, err := id.TeamIDFrom(d.ID) + if err != nil { + return nil, err + } + + members := map[id.UserID]user.Role{} + if d.Members != nil { + for uid, member := range d.Members { + uid, err := id.UserIDFrom(uid) + if err != nil { + return nil, err + } + members[uid] = user.Role(member.Role) + } + } + return user.NewTeam(). + ID(tid). + Name(d.Name). + Members(members). + Personal(d.Personal). + Build() +} + +func NewTeams(teams []*user.Team) ([]*TeamDocument, []string) { + res := make([]*TeamDocument, 0, len(teams)) + ids := make([]string, 0, len(teams)) + for _, d := range teams { + if d == nil { + continue + } + r, id := NewTeam(d) + res = append(res, r) + ids = append(ids, id) + } + return res, ids +} diff --git a/server/internal/infrastructure/mongo/mongodoc/user.go b/server/internal/infrastructure/mongo/mongodoc/user.go new file mode 100644 index 000000000..ae16cdef7 --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/user.go @@ -0,0 +1,146 @@ +package mongodoc + +import ( + "time" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" + user1 "github.com/reearth/reearth-backend/pkg/user" +) + +type PasswordResetDocument struct { + Token string + CreatedAt time.Time +} + +type UserDocument struct { + ID string + Name string + Email string + Auth0Sub string + Auth0SubList []string + Team string + Lang 
string + Theme string + Password []byte + PasswordReset *PasswordResetDocument + Verification *UserVerificationDoc +} + +type UserVerificationDoc struct { + Code string + Expiration time.Time + Verified bool +} + +type UserConsumer struct { + Rows []*user1.User +} + +func (u *UserConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc UserDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + user, err := doc.Model() + if err != nil { + return err + } + u.Rows = append(u.Rows, user) + return nil +} + +func NewUser(user *user1.User) (*UserDocument, string) { + id := user.ID().String() + auths := user.Auths() + authsdoc := make([]string, 0, len(auths)) + for _, a := range auths { + authsdoc = append(authsdoc, a.Sub) + } + var v *UserVerificationDoc + if user.Verification() != nil { + v = &UserVerificationDoc{ + Code: user.Verification().Code(), + Expiration: user.Verification().Expiration(), + Verified: user.Verification().IsVerified(), + } + } + pwdReset := user.PasswordReset() + + var pwdResetDoc *PasswordResetDocument + if pwdReset != nil { + pwdResetDoc = &PasswordResetDocument{ + Token: pwdReset.Token, + CreatedAt: pwdReset.CreatedAt, + } + } + + return &UserDocument{ + ID: id, + Name: user.Name(), + Email: user.Email(), + Auth0SubList: authsdoc, + Team: user.Team().String(), + Lang: user.Lang().String(), + Theme: string(user.Theme()), + Verification: v, + Password: user.Password(), + PasswordReset: pwdResetDoc, + }, id +} + +func (d *UserDocument) Model() (*user1.User, error) { + uid, err := id.UserIDFrom(d.ID) + if err != nil { + return nil, err + } + tid, err := id.TeamIDFrom(d.Team) + if err != nil { + return nil, err + } + auths := make([]user.Auth, 0, len(d.Auth0SubList)) + for _, s := range d.Auth0SubList { + auths = append(auths, user.AuthFromAuth0Sub(s)) + } + if d.Auth0Sub != "" { + auths = append(auths, user.AuthFromAuth0Sub(d.Auth0Sub)) + } + var v *user.Verification + if d.Verification != nil { 
+ v = user.VerificationFrom(d.Verification.Code, d.Verification.Expiration, d.Verification.Verified) + } + + u, err := user1.New(). + ID(uid). + Name(d.Name). + Email(d.Email). + Auths(auths). + Team(tid). + LangFrom(d.Lang). + Verification(v). + EncodedPassword(d.Password). + PasswordReset(d.PasswordReset.Model()). + Theme(user.Theme(d.Theme)). + Build() + + if err != nil { + return nil, err + } + return u, nil +} + +func (d *PasswordResetDocument) Model() *user1.PasswordReset { + if d == nil { + return nil + } + return &user1.PasswordReset{ + Token: d.Token, + CreatedAt: d.CreatedAt, + } +} diff --git a/server/internal/infrastructure/mongo/mongodoc/util.go b/server/internal/infrastructure/mongo/mongodoc/util.go new file mode 100644 index 000000000..175407ed7 --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/util.go @@ -0,0 +1,155 @@ +package mongodoc + +import "go.mongodb.org/mongo-driver/bson" + +func convertDToM(i interface{}) interface{} { + if i == nil { + return nil + } + switch i2 := i.(type) { + case bson.D: + return i2.Map() + case bson.A: + a := make([]interface{}, 0, len(i2)) + for _, e := range i2 { + a = append(a, convertDToM(e)) + } + return a + case []bson.M: + a := make([]interface{}, 0, len(i2)) + for _, e := range i2 { + a = append(a, convertDToM(e)) + } + return a + case []bson.D: + a := make([]interface{}, 0, len(i2)) + for _, e := range i2 { + a = append(a, convertDToM(e)) + } + return a + case []bson.A: + a := make([]interface{}, 0, len(i2)) + for _, e := range i2 { + a = append(a, convertDToM(e)) + } + return a + case []interface{}: + a := make([]interface{}, 0, len(i2)) + for _, e := range i2 { + a = append(a, convertDToM(e)) + } + return a + } + return i +} + +func appendI(f interface{}, elements ...interface{}) interface{} { + switch f2 := f.(type) { + case []bson.D: + res := make([]interface{}, 0, len(f2)) + for _, e := range f2 { + res = append(res, e) + } + return append(res, elements...) 
+ case []bson.M: + res := make([]interface{}, 0, len(f2)+len(elements)) + for _, e := range f2 { + res = append(res, e) + } + return append(res, elements...) + case bson.A: + res := make([]interface{}, 0, len(f2)+len(elements)) + return append(res, append(f2, elements...)...) + case []interface{}: + res := make([]interface{}, 0, len(f2)+len(elements)) + return append(res, append(f2, elements...)...) + } + return f +} + +func appendE(f interface{}, elements ...bson.E) interface{} { + switch f2 := f.(type) { + case bson.D: + for _, e := range elements { + f2 = append(f2, e) + } + return f2 + case bson.M: + f3 := make(bson.M, len(f2)) + for k, v := range f2 { + f3[k] = v + } + for _, e := range elements { + f3[e.Key] = e.Value + } + return f3 + } + return f +} + +func getE(f interface{}, k string) interface{} { + switch g := f.(type) { + case bson.D: + for _, e := range g { + if e.Key == k { + return e.Value + } + } + case bson.M: + return g[k] + } + return nil +} + +func And(filter interface{}, key string, f interface{}) interface{} { + if f == nil { + return filter + } + if g, ok := f.(bson.M); ok && g == nil { + return filter + } + if g, ok := f.(bson.D); ok && g == nil { + return filter + } + if g, ok := f.(bson.A); ok && g == nil { + return filter + } + if g, ok := f.([]interface{}); ok && g == nil { + return filter + } + if g, ok := f.([]bson.M); ok && g == nil { + return filter + } + if g, ok := f.([]bson.D); ok && g == nil { + return filter + } + if g, ok := f.([]bson.A); ok && g == nil { + return filter + } + + if key != "" && getE(filter, key) != nil { + return filter + } + var g interface{} + if key == "" { + g = f + } else { + g = bson.M{key: f} + } + if getE(filter, "$or") != nil { + return bson.M{ + "$and": []interface{}{filter, g}, + } + } + if and := getE(filter, "$and"); and != nil { + return bson.M{ + "$and": appendI(and, g), + } + } + if key == "" { + return bson.M{ + "$and": []interface{}{filter, g}, + } + } + return appendE(filter, bson.E{Key: 
key, Value: f}) +} diff --git a/server/internal/infrastructure/mongo/mongodoc/util_test.go b/server/internal/infrastructure/mongo/mongodoc/util_test.go new file mode 100644 index 000000000..806e0a128 --- /dev/null +++ b/server/internal/infrastructure/mongo/mongodoc/util_test.go @@ -0,0 +1,99 @@ +package mongodoc + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "go.mongodb.org/mongo-driver/bson" +) + +func TestConvertDToM(t *testing.T) { + assert.Equal(t, bson.M{"a": "b"}, convertDToM(bson.M{"a": "b"})) + assert.Equal(t, bson.M{"a": "b"}, convertDToM(bson.D{{Key: "a", Value: "b"}})) + assert.Equal(t, []interface{}{bson.M{"a": "b"}}, convertDToM([]bson.D{{{Key: "a", Value: "b"}}})) + assert.Equal(t, []interface{}{bson.M{"a": "b"}}, convertDToM([]bson.M{{"a": "b"}})) + assert.Equal(t, []interface{}{bson.M{"a": "b"}}, convertDToM(bson.A{bson.D{{Key: "a", Value: "b"}}})) + assert.Equal(t, []interface{}{bson.M{"a": "b"}}, convertDToM([]interface{}{bson.D{{Key: "a", Value: "b"}}})) +} + +func TestAppendI(t *testing.T) { + assert.Equal(t, []interface{}{bson.M{"a": "b"}, "x"}, appendI([]bson.M{{"a": "b"}}, "x")) + assert.Equal(t, []interface{}{bson.D{{Key: "a", Value: "b"}}, "x"}, appendI([]bson.D{{{Key: "a", Value: "b"}}}, "x")) + assert.Equal(t, []interface{}{bson.D{{Key: "a", Value: "b"}}, "x"}, appendI(bson.A{bson.D{{Key: "a", Value: "b"}}}, "x")) + assert.Equal(t, []interface{}{bson.D{{Key: "a", Value: "b"}}, "x"}, appendI([]interface{}{bson.D{{Key: "a", Value: "b"}}}, "x")) +} + +func TestAppendE(t *testing.T) { + assert.Equal(t, bson.M{"a": "b", "c": "d"}, appendE(bson.M{"a": "b"}, bson.E{Key: "c", Value: "d"})) + assert.Equal(t, bson.D{{Key: "a", Value: "b"}, {Key: "c", Value: "d"}}, appendE(bson.D{{Key: "a", Value: "b"}}, bson.E{Key: "c", Value: "d"})) + assert.Equal(t, []bson.M{}, appendE([]bson.M{}, bson.E{Key: "c", Value: "d"})) +} + +func TestGetE(t *testing.T) { + assert.Equal(t, "b", getE(bson.M{"a": "b"}, "a")) + assert.Nil(t, 
getE(bson.M{"a": "b"}, "b")) + assert.Equal(t, "b", getE(bson.D{{Key: "a", Value: "b"}}, "a")) + assert.Nil(t, getE(bson.D{{Key: "a", Value: "b"}}, "b")) + assert.Nil(t, getE(bson.A{}, "b")) +} + +func TestAnd(t *testing.T) { + assert.Equal(t, bson.M{"x": "y"}, And(bson.M{}, "x", "y")) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "x", "y")) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "", nil)) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "", bson.M(nil))) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "", bson.D(nil))) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "", bson.A(nil))) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "", []bson.M(nil))) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "", []bson.D(nil))) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "", []bson.A(nil))) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "", []interface{}(nil))) + assert.Equal(t, bson.M{ + "$and": []interface{}{ + bson.M{"$or": []bson.M{{"a": "b"}}}, + bson.M{"x": "y"}, + }, + }, And(bson.M{"$or": []bson.M{{"a": "b"}}}, "x", "y")) + assert.Equal(t, bson.M{ + "$and": []interface{}{ + bson.M{"a": "b"}, + bson.M{"x": "y"}, + }, + }, And(bson.M{"$and": []bson.M{{"a": "b"}}}, "x", "y")) + assert.Equal(t, bson.M{ + "$and": []interface{}{ + bson.M{"a": "b"}, + bson.M{"x": "y"}, + }, + }, And(bson.M{"$and": []interface{}{bson.M{"a": "b"}}}, "x", "y")) + + assert.Equal(t, bson.D{{Key: "x", Value: "y"}}, And(bson.D{}, "x", "y")) + assert.Equal(t, bson.D{{Key: "x", Value: "z"}}, And(bson.D{{Key: "x", Value: "z"}}, "x", "y")) + assert.Equal(t, bson.M{ + "$and": []interface{}{ + bson.D{{Key: "$or", Value: []bson.M{{"a": "b"}}}}, + bson.M{"x": "y"}, + }, + }, And(bson.D{{Key: "$or", Value: []bson.M{{"a": "b"}}}}, "x", "y")) + assert.Equal(t, bson.M{ + "$and": []interface{}{ + bson.M{"a": "b"}, + bson.M{"x": "y"}, + }, + }, And(bson.D{{Key: "$and", Value: []bson.M{{"a": "b"}}}}, "x", 
"y")) + + assert.Equal(t, bson.M{"$and": []interface{}{bson.M{}, "y"}}, And(bson.M{}, "", "y")) + assert.Equal(t, bson.M{"$and": []interface{}{bson.D{}, "y"}}, And(bson.D{}, "", "y")) + assert.Equal(t, bson.M{ + "$and": []interface{}{ + bson.D{{Key: "$or", Value: []bson.M{{"a": "b"}}}}, + "y", + }, + }, And(bson.D{{Key: "$or", Value: []bson.M{{"a": "b"}}}}, "", "y")) + assert.Equal(t, bson.M{ + "$and": []interface{}{ + bson.M{"a": "b"}, + "y", + }, + }, And(bson.D{{Key: "$and", Value: []bson.M{{"a": "b"}}}}, "", "y")) +} diff --git a/server/internal/infrastructure/mongo/plugin.go b/server/internal/infrastructure/mongo/plugin.go new file mode 100644 index 000000000..8469f84c9 --- /dev/null +++ b/server/internal/infrastructure/mongo/plugin.go @@ -0,0 +1,132 @@ +package mongo + +import ( + "context" + "errors" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type pluginRepo struct { + client *mongodoc.ClientCollection + f repo.SceneFilter +} + +func NewPlugin(client *mongodoc.Client) repo.Plugin { + r := &pluginRepo{client: client.WithCollection("plugin")} + r.init() + return r +} + +func (r *pluginRepo) init() { + i := r.client.CreateIndex(context.Background(), []string{"scene"}) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "plugin", i) + } +} + +func (r *pluginRepo) Filtered(f repo.SceneFilter) repo.Plugin { + return &pluginRepo{ + client: r.client, + f: r.f.Merge(f), + } +} + +func (r *pluginRepo) FindByID(ctx context.Context, pid id.PluginID) (*plugin.Plugin, error) { + // TODO: separate built-in plugins to another repository + if p := builtin.GetPlugin(pid); p != nil { + 
return p, nil + } + if s := pid.Scene(); s != nil && !r.f.CanRead(*s) { + return nil, rerror.ErrNotFound + } + return r.findOne(ctx, bson.M{ + "id": pid.String(), + }) +} + +func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { + if len(ids) == 0 { + return nil, nil + } + + // TODO: separate built-in plugins to another repository + // exclude built-in + b := plugin.Map{} + ids2 := make([]id.PluginID, 0, len(ids)) + for _, id := range ids { + if p := builtin.GetPlugin(id); p != nil { + b[id] = p + } else if s := id.Scene(); s == nil || r.f.CanRead(*s) { + ids2 = append(ids2, id) + } + } + + res := make(plugin.List, 0, len(ids2)) + var err error + + if len(ids2) > 0 { + filter := bson.M{ + "id": bson.M{"$in": id.PluginIDsToStrings(ids2)}, + } + dst := make([]*plugin.Plugin, 0, len(ids2)) + res, err = r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + } + + return res.Concat(b.List()).MapToIDs(ids), nil +} + +func (r *pluginRepo) Save(ctx context.Context, plugin *plugin.Plugin) error { + if plugin.ID().System() { + return errors.New("cannnot save system plugin") + } + if s := plugin.ID().Scene(); s != nil && !r.f.CanWrite(*s) { + return repo.ErrOperationDenied + } + doc, id := mongodoc.NewPlugin(plugin) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *pluginRepo) Remove(ctx context.Context, id id.PluginID) error { + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) +} + +func (r *pluginRepo) find(ctx context.Context, dst []*plugin.Plugin, filter interface{}) ([]*plugin.Plugin, error) { + c := mongodoc.PluginConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.Rows, nil +} + +func (r *pluginRepo) findOne(ctx context.Context, filter interface{}) (*plugin.Plugin, error) { + dst := make([]*plugin.Plugin, 0, 1) + c := mongodoc.PluginConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, 
r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +func (r *pluginRepo) readFilter(filter interface{}) interface{} { + return applyOptionalSceneFilter(filter, r.f.Readable) +} + +func (r *pluginRepo) writeFilter(filter interface{}) interface{} { + return applyOptionalSceneFilter(filter, r.f.Writable) +} diff --git a/server/internal/infrastructure/mongo/project.go b/server/internal/infrastructure/mongo/project.go new file mode 100644 index 000000000..4d5309801 --- /dev/null +++ b/server/internal/infrastructure/mongo/project.go @@ -0,0 +1,158 @@ +package mongo + +import ( + "context" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/project" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type projectRepo struct { + client *mongodoc.ClientCollection + f repo.TeamFilter +} + +func NewProject(client *mongodoc.Client) repo.Project { + r := &projectRepo{client: client.WithCollection("project")} + r.init() + return r +} + +func (r *projectRepo) init() { + i := r.client.CreateIndex(context.Background(), []string{"alias", "team"}) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "project", i) + } +} + +func (r *projectRepo) Filtered(f repo.TeamFilter) repo.Project { + return &projectRepo{ + client: r.client, + f: r.f.Merge(f), + } +} + +func (r *projectRepo) FindByID(ctx context.Context, id id.ProjectID) (*project.Project, error) { + return r.findOne(ctx, bson.M{ + "id": id.String(), + }) +} + +func (r *projectRepo) FindByIDs(ctx context.Context, ids id.ProjectIDList) ([]*project.Project, error) { + if len(ids) == 0 { + return nil, nil + } + + filter := bson.M{ + "id": bson.M{ + "$in": 
ids.Strings(), + }, + } + dst := make([]*project.Project, 0, len(ids)) + res, err := r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterProjects(ids, res), nil +} + +func (r *projectRepo) FindByTeam(ctx context.Context, id id.TeamID, pagination *usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) { + if !r.f.CanRead(id) { + return nil, usecase.EmptyPageInfo(), nil + } + return r.paginate(ctx, bson.M{ + "team": id.String(), + }, pagination) +} + +func (r *projectRepo) FindByPublicName(ctx context.Context, name string) (*project.Project, error) { + if name == "" { + return nil, rerror.ErrNotFound + } + return r.findOne(ctx, bson.M{ + "$or": []bson.M{ + {"alias": name, "publishmentstatus": "limited"}, + {"domains.domain": name, "publishmentstatus": "public"}, + {"alias": name, "publishmentstatus": "public"}, + }, + }) +} + +func (r *projectRepo) CountByTeam(ctx context.Context, team id.TeamID) (int, error) { + count, err := r.client.Count(ctx, bson.M{ + "team": team.String(), + }) + return int(count), err +} + +func (r *projectRepo) Save(ctx context.Context, project *project.Project) error { + if !r.f.CanWrite(project.Team()) { + return repo.ErrOperationDenied + } + doc, id := mongodoc.NewProject(project) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *projectRepo) Remove(ctx context.Context, id id.ProjectID) error { + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) +} + +func (r *projectRepo) find(ctx context.Context, dst []*project.Project, filter interface{}) ([]*project.Project, error) { + c := mongodoc.ProjectConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.Rows, nil +} + +func (r *projectRepo) findOne(ctx context.Context, filter interface{}) (*project.Project, error) { + dst := make([]*project.Project, 0, 1) + c := mongodoc.ProjectConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, 
r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +func (r *projectRepo) paginate(ctx context.Context, filter bson.M, pagination *usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) { + var c mongodoc.ProjectConsumer + pageInfo, err := r.client.Paginate(ctx, r.readFilter(filter), nil, pagination, &c) + if err != nil { + return nil, nil, rerror.ErrInternalBy(err) + } + return c.Rows, pageInfo, nil +} + +func filterProjects(ids []id.ProjectID, rows []*project.Project) []*project.Project { + res := make([]*project.Project, 0, len(ids)) + for _, id := range ids { + var r2 *project.Project + for _, r := range rows { + if r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func (r *projectRepo) readFilter(filter interface{}) interface{} { + return applyTeamFilter(filter, r.f.Readable) +} + +func (r *projectRepo) writeFilter(filter interface{}) interface{} { + return applyTeamFilter(filter, r.f.Writable) +} diff --git a/server/internal/infrastructure/mongo/property.go b/server/internal/infrastructure/mongo/property.go new file mode 100644 index 000000000..2632b3fe1 --- /dev/null +++ b/server/internal/infrastructure/mongo/property.go @@ -0,0 +1,219 @@ +package mongo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" + "go.mongodb.org/mongo-driver/bson" +) + +type propertyRepo struct { + client *mongodoc.ClientCollection + f repo.SceneFilter +} + +func NewProperty(client *mongodoc.Client) repo.Property { + r := &propertyRepo{client: client.WithCollection("property")} + r.init() + return r +} + +func (r *propertyRepo) init() { + i := r.client.CreateIndex(context.Background(), 
[]string{"scene", "schema"}) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "property", i) + } +} + +func (r *propertyRepo) Filtered(f repo.SceneFilter) repo.Property { + return &propertyRepo{ + client: r.client, + f: r.f.Merge(f), + } +} + +func (r *propertyRepo) FindByID(ctx context.Context, id id.PropertyID) (*property.Property, error) { + return r.findOne(ctx, bson.M{ + "id": id.String(), + }) +} + +func (r *propertyRepo) FindByIDs(ctx context.Context, ids id.PropertyIDList) (property.List, error) { + if len(ids) == 0 { + return nil, nil + } + + filter := bson.M{ + "id": bson.M{ + "$in": ids.Strings(), + }, + } + dst := make(property.List, 0, len(ids)) + res, err := r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterProperties(ids, res), nil +} + +func (r *propertyRepo) FindLinkedAll(ctx context.Context, id id.SceneID) (property.List, error) { + return r.find(ctx, nil, bson.M{ + "scene": id.String(), + "fields": bson.M{ + "$elemMatch": bson.M{ + "links": bson.M{ + "$not": bson.M{ + "$size": 0, + }, + }, + }, + }, + }) +} + +func (r *propertyRepo) FindByDataset(ctx context.Context, sid id.DatasetSchemaID, did id.DatasetID) (property.List, error) { + return r.find(ctx, nil, bson.M{ + "$or": []bson.M{ + {"fields.links.dataset": did.String()}, // for compatibility + {"items.fields.links.dataset": did.String()}, + {"items.groups.fields.links.dataset": did.String()}, + {"fields.links.schema": sid.String()}, // for compatibility + {"items.fields.links.schema": sid.String()}, + {"items.groups.fields.links.schema": sid.String()}, + }, + }) +} + +func (r *propertyRepo) FindBySchema(ctx context.Context, psids []id.PropertySchemaID, sid id.SceneID) (property.List, error) { + if len(psids) == 0 || !r.f.CanRead(sid) { + return nil, nil + } + + filters := make([]bson.M, 0, len(psids)) + for _, s := range psids { + filters = append(filters, bson.M{ + "schemaplugin": s.Plugin().String(), + "schemaname": s.ID(), + "scene": 
sid.String(),
		})
	}
	// Use $or: each property document has exactly one schema, so $and over
	// several distinct schema conditions could never match any document.
	// ($or and $and are equivalent for the common single-schema case.)
	filter := bson.M{"$or": filters}
	return r.find(ctx, nil, filter)
}

// FindByPlugin returns all properties in the scene whose schema belongs to
// the given plugin, subject to the scene read filter.
func (r *propertyRepo) FindByPlugin(ctx context.Context, pid id.PluginID, sid id.SceneID) (property.List, error) {
	if !r.f.CanRead(sid) {
		return nil, rerror.ErrNotFound
	}
	if s := pid.Scene(); s != nil && !r.f.CanRead(*s) {
		return nil, rerror.ErrNotFound
	}
	filter := bson.M{
		"schemaplugin": pid.String(),
		"scene":        sid.String(),
	}
	return r.find(ctx, nil, filter)
}

// Save persists a single property if its scene is writable.
func (r *propertyRepo) Save(ctx context.Context, property *property.Property) error {
	if !r.f.CanWrite(property.Scene()) {
		return repo.ErrOperationDenied
	}
	doc, id := mongodoc.NewProperty(property)
	return r.client.SaveOne(ctx, id, doc)
}

// SaveAll persists the given properties; NewProperties drops unwritable ones.
func (r *propertyRepo) SaveAll(ctx context.Context, properties property.List) error {
	if len(properties) == 0 {
		return nil
	}
	docs, ids := mongodoc.NewProperties(properties, r.f.Writable)
	return r.client.SaveAll(ctx, ids, docs)
}

// UpdateSchemaPlugin rewrites the schema plugin ID of all properties in the
// scene from old to new. No-op when the scene is not writable.
func (r *propertyRepo) UpdateSchemaPlugin(ctx context.Context, old, new id.PluginID, s id.SceneID) error {
	if !r.f.CanWrite(s) {
		return nil
	}
	return r.client.UpdateMany(ctx, bson.M{
		"schemaplugin": old.String(),
		"scene":        s.String(),
	}, bson.M{
		"schemaplugin": new.String(),
	})
}

// Remove deletes a single property, subject to the write filter.
func (r *propertyRepo) Remove(ctx context.Context, id id.PropertyID) error {
	return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()}))
}

// RemoveAll deletes all properties with the given IDs, subject to the write filter.
func (r *propertyRepo) RemoveAll(ctx context.Context, ids id.PropertyIDList) error {
	if len(ids) == 0 {
		return nil
	}
	return r.client.RemoveAll(ctx, r.writeFilter(bson.M{
		"id": bson.M{"$in": ids.Strings()},
	}))
}

// RemoveByScene deletes every property belonging to the scene. No-op when
// the scene is not writable.
func (r *propertyRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error {
	if !r.f.CanWrite(sceneID) {
		return nil
	}
	_, err := r.client.Collection().DeleteMany(ctx, bson.M{
		"scene": sceneID.String(),
	})
	if err != nil {
		return rerror.ErrInternalBy(err)
	}
	return nil
}

func (r *propertyRepo) 
find(ctx context.Context, dst property.List, filter interface{}) (property.List, error) { + c := mongodoc.PropertyConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.Rows, nil +} + +func (r *propertyRepo) findOne(ctx context.Context, filter interface{}) (*property.Property, error) { + dst := make(property.List, 0, 1) + c := mongodoc.PropertyConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +func filterProperties(ids []id.PropertyID, rows property.List) property.List { + res := make(property.List, 0, len(ids)) + for _, id := range ids { + var r2 *property.Property + for _, r := range rows { + if r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func (r *propertyRepo) readFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Readable) +} + +func (r *propertyRepo) writeFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Writable) +} diff --git a/server/internal/infrastructure/mongo/property_schema.go b/server/internal/infrastructure/mongo/property_schema.go new file mode 100644 index 000000000..4da9fdc7d --- /dev/null +++ b/server/internal/infrastructure/mongo/property_schema.go @@ -0,0 +1,152 @@ +package mongo + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/property" + "go.mongodb.org/mongo-driver/bson" +) + +type propertySchemaRepo struct { + client *mongodoc.ClientCollection + f repo.SceneFilter +} + +func NewPropertySchema(client *mongodoc.Client) repo.PropertySchema { + r := 
&propertySchemaRepo{client: client.WithCollection("propertySchema")} + r.init() + return r +} + +func (r *propertySchemaRepo) init() { + i := r.client.CreateIndex(context.Background(), nil) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "propertySchema", i) + } +} + +func (r *propertySchemaRepo) Filtered(f repo.SceneFilter) repo.PropertySchema { + return &propertySchemaRepo{ + client: r.client, + f: r.f.Merge(f), + } +} + +func (r *propertySchemaRepo) FindByID(ctx context.Context, id id.PropertySchemaID) (*property.Schema, error) { + if ps := builtin.GetPropertySchema(id); ps != nil { + return ps, nil + } + + filter := bson.D{{Key: "id", Value: id.String()}} + return r.findOne(ctx, filter) +} + +func (r *propertySchemaRepo) FindByIDs(ctx context.Context, ids []id.PropertySchemaID) (property.SchemaList, error) { + if len(ids) == 0 { + return nil, nil + } + + // exclude built-in + b := property.SchemaMap{} + ids2 := make([]id.PropertySchemaID, 0, len(ids)) + for _, id := range ids { + if p := builtin.GetPropertySchema(id); p != nil { + b[id] = p + } else if s := id.Plugin().Scene(); s == nil || r.f.CanRead(*s) { + ids2 = append(ids2, id) + } + } + + res := make(property.SchemaList, 0, len(ids2)) + var err error + + if len(ids2) > 0 { + filter := bson.D{{Key: "id", Value: bson.D{{ + Key: "$in", Value: id.PropertySchemaIDsToStrings(ids2), + }}}} + dst := make(property.SchemaList, 0, len(ids2)) + res, err = r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + } + + return res.Concat(b.List()).MapToIDs(ids), nil +} + +func (r *propertySchemaRepo) Save(ctx context.Context, m *property.Schema) error { + if m.ID().Plugin().System() { + return errors.New("cannnot save system property schema") + } + if s := m.Scene(); s != nil && !r.f.CanWrite(*s) { + return repo.ErrOperationDenied + } + + doc, id := mongodoc.NewPropertySchema(m) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *propertySchemaRepo) SaveAll(ctx context.Context, m 
property.SchemaList) error { + savable := make(property.SchemaList, 0, len(m)) + for _, ps := range m { + if ps.ID().Plugin().System() { + continue + } + savable = append(savable, ps) + } + + if len(m) == 0 { + return nil + } + + docs, ids := mongodoc.NewPropertySchemas(savable, r.f.Writable) + return r.client.SaveAll(ctx, ids, docs) +} + +func (r *propertySchemaRepo) Remove(ctx context.Context, id id.PropertySchemaID) error { + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) +} + +func (r *propertySchemaRepo) RemoveAll(ctx context.Context, ids []id.PropertySchemaID) error { + if len(ids) == 0 { + return nil + } + return r.client.RemoveAll(ctx, r.writeFilter(bson.M{ + "id": bson.M{"$in": id.PropertySchemaIDsToStrings(ids)}, + })) +} + +func (r *propertySchemaRepo) find(ctx context.Context, dst property.SchemaList, filter interface{}) (property.SchemaList, error) { + c := mongodoc.PropertySchemaConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.Rows, nil +} + +func (r *propertySchemaRepo) findOne(ctx context.Context, filter interface{}) (*property.Schema, error) { + dst := make(property.SchemaList, 0, 1) + c := mongodoc.PropertySchemaConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +func (r *propertySchemaRepo) readFilter(filter interface{}) interface{} { + return applyOptionalSceneFilter(filter, r.f.Readable) +} + +func (r *propertySchemaRepo) writeFilter(filter interface{}) interface{} { + return applyOptionalSceneFilter(filter, r.f.Writable) +} diff --git a/server/internal/infrastructure/mongo/scene.go b/server/internal/infrastructure/mongo/scene.go new file mode 100644 index 000000000..a9567316c --- /dev/null +++ b/server/internal/infrastructure/mongo/scene.go @@ -0,0 +1,118 @@ +package mongo + +import ( + "context" + + 
"github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/user" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" +) + +type sceneRepo struct { + client *mongodoc.ClientCollection + f repo.TeamFilter +} + +func NewScene(client *mongodoc.Client) repo.Scene { + r := &sceneRepo{client: client.WithCollection("scene")} + r.init() + return r +} + +func (r *sceneRepo) init() { + i := r.client.CreateIndex(context.Background(), []string{"project"}) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "scene", i) + } +} + +func (r *sceneRepo) Filtered(f repo.TeamFilter) repo.Scene { + return &sceneRepo{ + client: r.client, + f: r.f.Merge(f), + } +} + +func (r *sceneRepo) FindByID(ctx context.Context, id id.SceneID) (*scene.Scene, error) { + return r.findOne(ctx, bson.M{ + "id": id.String(), + }) +} + +func (r *sceneRepo) FindByIDs(ctx context.Context, ids id.SceneIDList) (scene.List, error) { + if len(ids) == 0 { + return nil, nil + } + + return r.find(ctx, make(scene.List, 0, len(ids)), bson.M{ + "id": bson.M{ + "$in": ids.Strings(), + }, + }) +} + +func (r *sceneRepo) FindByProject(ctx context.Context, id id.ProjectID) (*scene.Scene, error) { + return r.findOne(ctx, bson.M{ + "project": id.String(), + }) +} + +func (r *sceneRepo) FindByTeam(ctx context.Context, teams ...id.TeamID) (scene.List, error) { + teams2 := id.TeamIDList(teams) + if r.f.Readable != nil { + teams2 = teams2.Intersect(r.f.Readable) + } + res, err := r.find(ctx, nil, bson.M{ + "team": bson.M{"$in": user.TeamIDList(teams).Strings()}, + }) + if err != nil && err != mongo.ErrNilDocument && err != mongo.ErrNoDocuments { + return nil, err + } + return res, nil +} + +func (r *sceneRepo) Save(ctx context.Context, scene 
*scene.Scene) error { + if !r.f.CanWrite(scene.Team()) { + return repo.ErrOperationDenied + } + doc, id := mongodoc.NewScene(scene) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *sceneRepo) Remove(ctx context.Context, id id.SceneID) error { + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) +} + +func (r *sceneRepo) find(ctx context.Context, dst []*scene.Scene, filter interface{}) ([]*scene.Scene, error) { + c := mongodoc.SceneConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.Rows, nil +} + +func (r *sceneRepo) findOne(ctx context.Context, filter interface{}) (*scene.Scene, error) { + dst := make([]*scene.Scene, 0, 1) + c := mongodoc.SceneConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +func (r *sceneRepo) readFilter(filter interface{}) interface{} { + return applyTeamFilter(filter, r.f.Readable) +} + +func (r *sceneRepo) writeFilter(filter interface{}) interface{} { + return applyTeamFilter(filter, r.f.Writable) +} diff --git a/server/internal/infrastructure/mongo/scene_lock.go b/server/internal/infrastructure/mongo/scene_lock.go new file mode 100644 index 000000000..da8cff8a1 --- /dev/null +++ b/server/internal/infrastructure/mongo/scene_lock.go @@ -0,0 +1,75 @@ +package mongo + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/scene" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +type sceneLockRepo struct { + client *mongodoc.ClientCollection +} + +func NewSceneLock(client *mongodoc.Client) repo.SceneLock { + 
return &sceneLockRepo{client: client.WithCollection("sceneLock")}
+}
+
+func (r *sceneLockRepo) GetLock(ctx context.Context, sceneID id.SceneID) (scene.LockMode, error) {
+	filter := bson.D{
+		{Key: "scene", Value: sceneID.String()},
+	}
+	var c mongodoc.SceneLockConsumer
+	if err2 := r.client.FindOne(ctx, filter, &c); err2 != nil {
+		if errors.Is(err2, rerror.ErrNotFound) {
+			return scene.LockModeFree, nil
+		}
+		return scene.LockMode(""), err2
+	}
+	return c.Rows[0], nil
+}
+
+func (r *sceneLockRepo) GetAllLock(ctx context.Context, ids id.SceneIDList) ([]scene.LockMode, error) {
+	filter := bson.D{
+		{Key: "scene", Value: bson.D{
+			{Key: "$in", Value: ids.Strings()},
+		}},
+	}
+	c := mongodoc.SceneLockConsumer{
+		Rows: make([]scene.LockMode, 0, len(ids)),
+	}
+	if err := r.client.Find(ctx, filter, &c); err != nil {
+		return nil, err
+	}
+	return c.Rows, nil
+}
+
+func (r *sceneLockRepo) SaveLock(ctx context.Context, sceneID id.SceneID, lock scene.LockMode) error {
+	filter := bson.D{{Key: "scene", Value: sceneID.String()}}
+	doc := mongodoc.NewSceneLock(sceneID, lock)
+	upsert := true
+	if _, err2 := r.client.Collection().UpdateOne(ctx, filter, bson.D{
+		{Key: "$set", Value: doc},
+	}, &options.UpdateOptions{
+		Upsert: &upsert,
+	}); err2 != nil {
+		return rerror.ErrInternalBy(err2)
+	}
+	return nil
+}
+
+func (r *sceneLockRepo) ReleaseAllLock(ctx context.Context) error {
+	if _, err2 := r.client.Collection().DeleteMany(ctx, bson.D{}); err2 != nil {
+		if err2 != mongo.ErrNilDocument && err2 != mongo.ErrNoDocuments {
+			return rerror.ErrInternalBy(err2)
+		}
+	}
+	return nil
+}
diff --git a/server/internal/infrastructure/mongo/tag.go b/server/internal/infrastructure/mongo/tag.go
new file mode 100644
index 000000000..fb634dd66
--- /dev/null
+++ b/server/internal/infrastructure/mongo/tag.go
@@ -0,0 +1,292 @@
+package mongo
+
+import (
+	"context"
+
+	"go.mongodb.org/mongo-driver/bson"
+
+	"github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc"
"github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/tag" +) + +type tagRepo struct { + client *mongodoc.ClientCollection + f repo.SceneFilter +} + +func NewTag(client *mongodoc.Client) repo.Tag { + r := &tagRepo{client: client.WithCollection("tag")} + r.init() + return r +} + +func (r *tagRepo) init() { + i := r.client.CreateIndex(context.Background(), []string{"scene", "group.tags", "item.parent"}) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "tag", i) + } +} + +func (r *tagRepo) Filtered(f repo.SceneFilter) repo.Tag { + return &tagRepo{ + client: r.client, + f: r.f.Merge(f), + } +} + +func (r *tagRepo) FindByID(ctx context.Context, id id.TagID) (tag.Tag, error) { + return r.findOne(ctx, bson.M{ + "id": id.String(), + }) +} + +func (r *tagRepo) FindByIDs(ctx context.Context, ids id.TagIDList) ([]*tag.Tag, error) { + if len(ids) == 0 { + return nil, nil + } + + filter := bson.M{ + "id": bson.M{ + "$in": ids.Strings(), + }, + } + dst := make([]*tag.Tag, 0, len(ids)) + res, err := r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterTags(ids, res), nil +} + +func (r *tagRepo) FindByScene(ctx context.Context, id id.SceneID) ([]*tag.Tag, error) { + if !r.f.CanRead(id) { + return nil, nil + } + filter := bson.M{ + "scene": id.String(), + } + return r.find(ctx, nil, filter) +} + +func (r *tagRepo) FindItemByID(ctx context.Context, id id.TagID) (*tag.Item, error) { + filter := bson.M{ + "id": id.String(), + } + return r.findItemOne(ctx, filter) +} + +func (r *tagRepo) FindItemByIDs(ctx context.Context, ids id.TagIDList) ([]*tag.Item, error) { + filter := bson.M{ + "id": bson.M{ + "$in": ids.Strings(), + }, + } + dst := make([]*tag.Item, 0, len(ids)) + res, err := r.findItems(ctx, dst, filter) + if err != nil { + return nil, err + } + return 
filterTagItems(ids, res), nil +} + +func (r *tagRepo) FindGroupByID(ctx context.Context, id id.TagID) (*tag.Group, error) { + filter := bson.M{ + "id": id.String(), + } + return r.findGroupOne(ctx, filter) +} + +func (r *tagRepo) FindGroupByIDs(ctx context.Context, ids id.TagIDList) ([]*tag.Group, error) { + filter := bson.M{ + "id": bson.M{ + "$in": ids.Strings(), + }, + } + dst := make([]*tag.Group, 0, len(ids)) + res, err := r.findGroups(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterTagGroups(ids, res), nil +} + +func (r *tagRepo) FindRootsByScene(ctx context.Context, id id.SceneID) ([]*tag.Tag, error) { + return r.find(ctx, nil, bson.M{ + "scene": id.String(), + "item.parent": nil, + }) +} + +func (r *tagRepo) FindGroupByItem(ctx context.Context, tagID id.TagID) (*tag.Group, error) { + return r.findGroupOne(ctx, bson.M{ + "group.tags": tagID.String(), + }) +} + +func (r *tagRepo) Save(ctx context.Context, tag tag.Tag) error { + if !r.f.CanWrite(tag.Scene()) { + return repo.ErrOperationDenied + } + doc, tid := mongodoc.NewTag(tag) + return r.client.SaveOne(ctx, tid, doc) +} + +func (r *tagRepo) SaveAll(ctx context.Context, tags []*tag.Tag) error { + if tags == nil { + return nil + } + docs, ids := mongodoc.NewTags(tags, r.f.Writable) + return r.client.SaveAll(ctx, ids, docs) +} + +func (r *tagRepo) Remove(ctx context.Context, id id.TagID) error { + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) +} + +func (r *tagRepo) RemoveAll(ctx context.Context, ids id.TagIDList) error { + if len(ids) == 0 { + return nil + } + return r.client.RemoveAll(ctx, r.writeFilter(bson.M{ + "id": bson.M{"$in": ids.Strings()}, + })) +} + +func (r *tagRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + _, err := r.client.Collection().DeleteMany(ctx, bson.M{ + "scene": sceneID.String(), + }) + if err != nil { + return rerror.ErrInternalBy(err) + } + return nil +} + +func (r *tagRepo) find(ctx context.Context, dst 
[]*tag.Tag, filter interface{}) ([]*tag.Tag, error) { + c := mongodoc.TagConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.Rows, nil +} + +func (r *tagRepo) findOne(ctx context.Context, filter interface{}) (tag.Tag, error) { + c := mongodoc.TagConsumer{} + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + if len(c.Rows) == 0 { + return nil, rerror.ErrNotFound + } + return *c.Rows[0], nil +} + +func (r *tagRepo) findItemOne(ctx context.Context, filter interface{}) (*tag.Item, error) { + c := mongodoc.TagConsumer{} + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + if len(c.ItemRows) == 0 { + return nil, rerror.ErrNotFound + } + return c.ItemRows[0], nil +} + +func (r *tagRepo) findGroupOne(ctx context.Context, filter interface{}) (*tag.Group, error) { + c := mongodoc.TagConsumer{} + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + if len(c.GroupRows) == 0 { + return nil, rerror.ErrNotFound + } + return c.GroupRows[0], nil +} + +func (r *tagRepo) findItems(ctx context.Context, dst []*tag.Item, filter interface{}) ([]*tag.Item, error) { + c := mongodoc.TagConsumer{ + ItemRows: dst, + } + if c.ItemRows != nil { + c.Rows = make([]*tag.Tag, 0, len(c.ItemRows)) + } + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.ItemRows, nil +} + +func (r *tagRepo) findGroups(ctx context.Context, dst []*tag.Group, filter interface{}) ([]*tag.Group, error) { + c := mongodoc.TagConsumer{ + GroupRows: dst, + } + if c.GroupRows != nil { + c.Rows = make([]*tag.Tag, 0, len(c.GroupRows)) + } + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { + return nil, err + } + return c.GroupRows, nil +} + +func filterTags(ids id.TagIDList, rows []*tag.Tag) []*tag.Tag { + res := make([]*tag.Tag, 0, len(ids)) + for 
_, tid := range ids { + var r2 *tag.Tag + for _, r := range rows { + if r == nil { + continue + } + if r3 := *r; r3 != nil && r3.ID() == tid { + r2 = &r3 + break + } + } + res = append(res, r2) + } + return res +} + +func filterTagItems(ids id.TagIDList, rows []*tag.Item) []*tag.Item { + res := make([]*tag.Item, 0, len(ids)) + for _, tid := range ids { + var r2 *tag.Item + for _, r := range rows { + if r != nil && r.ID() == tid { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func filterTagGroups(ids id.TagIDList, rows []*tag.Group) []*tag.Group { + res := make([]*tag.Group, 0, len(ids)) + for _, tid := range ids { + var r2 *tag.Group + for _, r := range rows { + if r != nil && r.ID() == tid { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func (r *tagRepo) readFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Readable) +} + +func (r *tagRepo) writeFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Writable) +} diff --git a/server/internal/infrastructure/mongo/team.go b/server/internal/infrastructure/mongo/team.go new file mode 100644 index 000000000..256b936b7 --- /dev/null +++ b/server/internal/infrastructure/mongo/team.go @@ -0,0 +1,112 @@ +package mongo + +import ( + "context" + "strings" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/user" + "go.mongodb.org/mongo-driver/bson" +) + +type teamRepo struct { + client *mongodoc.ClientCollection +} + +func NewTeam(client *mongodoc.Client) repo.Team { + r := &teamRepo{client: client.WithCollection("team")} + r.init() + return r +} + +func (r *teamRepo) init() { + i := r.client.CreateIndex(context.Background(), nil) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "team", i) 
+ } +} + +func (r *teamRepo) FindByUser(ctx context.Context, id id.UserID) (user.TeamList, error) { + return r.find(ctx, nil, bson.M{ + "members." + strings.Replace(id.String(), ".", "", -1): bson.M{ + "$exists": true, + }, + }) +} + +func (r *teamRepo) FindByIDs(ctx context.Context, ids id.TeamIDList) (user.TeamList, error) { + if len(ids) == 0 { + return nil, nil + } + + dst := make([]*user.Team, 0, len(ids)) + res, err := r.find(ctx, dst, bson.M{ + "id": bson.M{"$in": ids.Strings()}, + }) + if err != nil { + return nil, err + } + return filterTeams(ids, res), nil +} + +func (r *teamRepo) FindByID(ctx context.Context, id id.TeamID) (*user.Team, error) { + return r.findOne(ctx, bson.M{"id": id.String()}) +} + +func (r *teamRepo) Save(ctx context.Context, team *user.Team) error { + doc, id := mongodoc.NewTeam(team) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *teamRepo) SaveAll(ctx context.Context, teams []*user.Team) error { + if len(teams) == 0 { + return nil + } + docs, ids := mongodoc.NewTeams(teams) + docs2 := make([]interface{}, 0, len(teams)) + for _, d := range docs { + docs2 = append(docs2, d) + } + return r.client.SaveAll(ctx, ids, docs2) +} + +func (r *teamRepo) Remove(ctx context.Context, id id.TeamID) error { + return r.client.RemoveOne(ctx, bson.M{"id": id.String()}) +} + +func (r *teamRepo) RemoveAll(ctx context.Context, ids id.TeamIDList) error { + if len(ids) == 0 { + return nil + } + return r.client.RemoveAll(ctx, bson.M{ + "id": bson.M{"$in": ids.Strings()}, + }) +} + +func (r *teamRepo) find(ctx context.Context, dst []*user.Team, filter interface{}) (user.TeamList, error) { + c := mongodoc.TeamConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows, nil +} + +func (r *teamRepo) findOne(ctx context.Context, filter interface{}) (*user.Team, error) { + dst := make([]*user.Team, 0, 1) + c := mongodoc.TeamConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, filter, 
&c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +func filterTeams(ids []id.TeamID, rows user.TeamList) user.TeamList { + return rows.FilterByID(ids...) +} diff --git a/server/internal/infrastructure/mongo/transaction.go b/server/internal/infrastructure/mongo/transaction.go new file mode 100644 index 000000000..b9ca9935e --- /dev/null +++ b/server/internal/infrastructure/mongo/transaction.go @@ -0,0 +1,20 @@ +package mongo + +import ( + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" +) + +type Transaction struct { + client *mongodoc.Client +} + +func NewTransaction(client *mongodoc.Client) repo.Transaction { + return &Transaction{ + client: client, + } +} + +func (t *Transaction) Begin() (repo.Tx, error) { + return t.client.BeginTransaction() +} diff --git a/server/internal/infrastructure/mongo/user.go b/server/internal/infrastructure/mongo/user.go new file mode 100644 index 000000000..be2d85c09 --- /dev/null +++ b/server/internal/infrastructure/mongo/user.go @@ -0,0 +1,137 @@ +package mongo + +import ( + "context" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/user" +) + +type userRepo struct { + client *mongodoc.ClientCollection +} + +func NewUser(client *mongodoc.Client) repo.User { + r := &userRepo{client: client.WithCollection("user")} + r.init() + return r +} + +func (r *userRepo) init() { + i := r.client.CreateUniqueIndex(context.Background(), []string{"email", "name", "auth0sublist"}, []string{"name"}) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "user", i) + } +} + +func (r *userRepo) FindByIDs(ctx context.Context, ids id.UserIDList) ([]*user.User, error) { + if 
len(ids) == 0 { + return nil, nil + } + + dst := make([]*user.User, 0, len(ids)) + res, err := r.find(ctx, dst, bson.M{ + "id": bson.M{"$in": ids.Strings()}, + }) + if err != nil { + return nil, err + } + return filterUsers(ids, res), nil +} + +func (r *userRepo) FindByID(ctx context.Context, id2 id.UserID) (*user.User, error) { + return r.findOne(ctx, bson.M{"id": id2.String()}) +} + +func (r *userRepo) FindByAuth0Sub(ctx context.Context, auth0sub string) (*user.User, error) { + return r.findOne(ctx, bson.M{ + "$or": []bson.M{ + {"auth0sub": auth0sub}, + { + "auth0sublist": bson.M{ + "$elemMatch": bson.M{ + "$eq": auth0sub, + }, + }, + }, + }, + }) +} + +func (r *userRepo) FindByEmail(ctx context.Context, email string) (*user.User, error) { + return r.findOne(ctx, bson.M{"email": email}) +} + +func (r *userRepo) FindByName(ctx context.Context, name string) (*user.User, error) { + return r.findOne(ctx, bson.M{"name": name}) +} + +func (r *userRepo) FindByNameOrEmail(ctx context.Context, nameOrEmail string) (*user.User, error) { + return r.findOne(ctx, bson.M{ + "$or": []bson.M{ + {"email": nameOrEmail}, + {"name": nameOrEmail}, + }, + }) +} + +func (r *userRepo) FindByVerification(ctx context.Context, code string) (*user.User, error) { + return r.findOne(ctx, bson.M{ + "verification.code": code, + }) +} + +func (r *userRepo) FindByPasswordResetRequest(ctx context.Context, pwdResetToken string) (*user.User, error) { + return r.findOne(ctx, bson.M{ + "passwordreset.token": pwdResetToken, + }) +} + +func (r *userRepo) Save(ctx context.Context, user *user.User) error { + doc, id := mongodoc.NewUser(user) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *userRepo) Remove(ctx context.Context, user id.UserID) error { + return r.client.RemoveOne(ctx, bson.M{"id": user.String()}) +} + +func (r *userRepo) find(ctx context.Context, dst []*user.User, filter interface{}) ([]*user.User, error) { + c := mongodoc.UserConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, 
filter, &c); err != nil { + return nil, err + } + return c.Rows, nil +} + +func (r *userRepo) findOne(ctx context.Context, filter interface{}) (*user.User, error) { + c := mongodoc.UserConsumer{ + Rows: make([]*user.User, 0, 1), + } + if err := r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +func filterUsers(ids []id.UserID, rows []*user.User) []*user.User { + res := make([]*user.User, 0, len(ids)) + for _, id := range ids { + var r2 *user.User + for _, r := range rows { + if r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} diff --git a/server/internal/usecase/cursor.go b/server/internal/usecase/cursor.go new file mode 100644 index 000000000..aed24547c --- /dev/null +++ b/server/internal/usecase/cursor.go @@ -0,0 +1 @@ +package usecase diff --git a/server/internal/usecase/gateway/authenticator.go b/server/internal/usecase/gateway/authenticator.go new file mode 100644 index 000000000..86a30b6a7 --- /dev/null +++ b/server/internal/usecase/gateway/authenticator.go @@ -0,0 +1,19 @@ +package gateway + +type AuthenticatorUpdateUserParam struct { + ID string + Name *string + Email *string + Password *string +} + +type AuthenticatorUser struct { + ID string + Name string + Email string + EmailVerified bool +} + +type Authenticator interface { + UpdateUser(AuthenticatorUpdateUserParam) (AuthenticatorUser, error) +} diff --git a/server/internal/usecase/gateway/container.go b/server/internal/usecase/gateway/container.go new file mode 100644 index 000000000..1ed7743fc --- /dev/null +++ b/server/internal/usecase/gateway/container.go @@ -0,0 +1,10 @@ +package gateway + +type Container struct { + Authenticator Authenticator + Mailer Mailer + DataSource DataSource + PluginRegistry PluginRegistry + File File + Google Google +} diff --git a/server/internal/usecase/gateway/datasouce.go b/server/internal/usecase/gateway/datasouce.go new file mode 100644 index 000000000..94d72bc9c --- /dev/null +++ 
b/server/internal/usecase/gateway/datasouce.go @@ -0,0 +1,18 @@ +package gateway + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" +) + +var ( + ErrDataSourceInvalidURL error = errors.New("invalid url") +) + +type DataSource interface { + Fetch(context.Context, string, id.SceneID) ([]*dataset.Schema, []*dataset.Dataset, error) + IsURLValid(context.Context, string) bool +} diff --git a/server/internal/usecase/gateway/file.go b/server/internal/usecase/gateway/file.go new file mode 100644 index 000000000..2eb86d4fd --- /dev/null +++ b/server/internal/usecase/gateway/file.go @@ -0,0 +1,31 @@ +package gateway + +import ( + "context" + "errors" + "io" + "net/url" + + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" +) + +var ( + ErrInvalidFile error = errors.New("invalid file") + ErrFailedToUploadFile error = errors.New("failed to upload file") + ErrFileTooLarge error = errors.New("file too large") + ErrFailedToRemoveFile error = errors.New("failed to remove file") +) + +type File interface { + ReadAsset(context.Context, string) (io.ReadCloser, error) + UploadAsset(context.Context, *file.File) (*url.URL, error) + RemoveAsset(context.Context, *url.URL) error + ReadPluginFile(context.Context, id.PluginID, string) (io.ReadCloser, error) + UploadPluginFile(context.Context, id.PluginID, *file.File) error + RemovePlugin(context.Context, id.PluginID) error + UploadBuiltScene(context.Context, io.Reader, string) error + ReadBuiltSceneFile(context.Context, string) (io.ReadCloser, error) + MoveBuiltScene(context.Context, string, string) error + RemoveBuiltScene(context.Context, string) error +} diff --git a/server/internal/usecase/gateway/google.go b/server/internal/usecase/gateway/google.go new file mode 100644 index 000000000..26655fbab --- /dev/null +++ b/server/internal/usecase/gateway/google.go @@ -0,0 +1,9 @@ +package gateway + +import ( + "io" +) + 
+type Google interface {
+	FetchCSV(token string, fileId string, sheetName string) (*io.ReadCloser, error)
+}
diff --git a/server/internal/usecase/gateway/mailer.go b/server/internal/usecase/gateway/mailer.go
new file mode 100644
index 000000000..3784d29fc
--- /dev/null
+++ b/server/internal/usecase/gateway/mailer.go
@@ -0,0 +1,10 @@
+package gateway
+
+type Contact struct {
+	Email string
+	Name  string
+}
+
+type Mailer interface {
+	SendMail(toContacts []Contact, subject, plainContent, htmlContent string) error
+}
diff --git a/server/internal/usecase/gateway/plugin_registry.go b/server/internal/usecase/gateway/plugin_registry.go
new file mode 100644
index 000000000..dc349a247
--- /dev/null
+++ b/server/internal/usecase/gateway/plugin_registry.go
@@ -0,0 +1,15 @@
+package gateway
+
+import (
+	"context"
+	"errors"
+
+	"github.com/reearth/reearth-backend/pkg/id"
+	"github.com/reearth/reearth-backend/pkg/plugin/pluginpack"
+)
+
+var ErrFailedToFetchDataFromPluginRegistry = errors.New("failed to fetch data from the plugin registry")
+
+type PluginRegistry interface {
+	FetchPluginPackage(context.Context, id.PluginID) (*pluginpack.Package, error)
+}
diff --git a/server/internal/usecase/interactor/asset.go b/server/internal/usecase/interactor/asset.go
new file mode 100644
index 000000000..288ca548a
--- /dev/null
+++ b/server/internal/usecase/interactor/asset.go
@@ -0,0 +1,103 @@
+package interactor
+
+import (
+	"context"
+	"net/url"
+	"path"
+
+	"github.com/reearth/reearth-backend/internal/usecase"
+	"github.com/reearth/reearth-backend/internal/usecase/gateway"
+	"github.com/reearth/reearth-backend/internal/usecase/interfaces"
+	"github.com/reearth/reearth-backend/internal/usecase/repo"
+	"github.com/reearth/reearth-backend/pkg/asset"
+	"github.com/reearth/reearth-backend/pkg/id"
+)
+
+type Asset struct {
+	repos    *repo.Container
+	gateways *gateway.Container
+}
+
+func NewAsset(r *repo.Container, g *gateway.Container) interfaces.Asset {
+	return &Asset{
+		repos:    r,
gateways: g, + } +} + +func (i *Asset) Fetch(ctx context.Context, assets []id.AssetID, operator *usecase.Operator) ([]*asset.Asset, error) { + return i.repos.Asset.FindByIDs(ctx, assets) +} + +func (i *Asset) FindByTeam(ctx context.Context, tid id.TeamID, keyword *string, sort *asset.SortType, p *usecase.Pagination, operator *usecase.Operator) ([]*asset.Asset, *usecase.PageInfo, error) { + return Run2( + ctx, operator, i.repos, + Usecase().WithReadableTeams(tid), + func() ([]*asset.Asset, *usecase.PageInfo, error) { + return i.repos.Asset.FindByTeam(ctx, tid, repo.AssetFilter{ + Sort: sort, + Keyword: keyword, + Pagination: p, + }) + }, + ) +} + +func (i *Asset) Create(ctx context.Context, inp interfaces.CreateAssetParam, operator *usecase.Operator) (result *asset.Asset, err error) { + if inp.File == nil { + return nil, interfaces.ErrFileNotIncluded + } + return Run1( + ctx, operator, i.repos, + Usecase(). + WithWritableTeams(inp.TeamID). + Transaction(), + func() (*asset.Asset, error) { + url, err := i.gateways.File.UploadAsset(ctx, inp.File) + if err != nil { + return nil, err + } + + a, err := asset.New(). + NewID(). + Team(inp.TeamID). + Name(path.Base(inp.File.Path)). + Size(inp.File.Size). + URL(url.String()). 
+ Build() + if err != nil { + return nil, err + } + + if err := i.repos.Asset.Save(ctx, a); err != nil { + return nil, err + } + + return a, nil + }) +} + +func (i *Asset) Remove(ctx context.Context, aid id.AssetID, operator *usecase.Operator) (result id.AssetID, err error) { + return Run1( + ctx, operator, i.repos, + Usecase().Transaction(), + func() (id.AssetID, error) { + asset, err := i.repos.Asset.FindByID(ctx, aid) + if err != nil { + return aid, err + } + + if ok := operator.IsWritableTeam(asset.Team()); !ok { + return aid, interfaces.ErrOperationDenied + } + + if url, _ := url.Parse(asset.URL()); url != nil { + if err := i.gateways.File.RemoveAsset(ctx, url); err != nil { + return aid, err + } + } + + return aid, i.repos.Asset.Remove(ctx, aid) + }, + ) +} diff --git a/server/internal/usecase/interactor/asset_test.go b/server/internal/usecase/interactor/asset_test.go new file mode 100644 index 000000000..2570db134 --- /dev/null +++ b/server/internal/usecase/interactor/asset_test.go @@ -0,0 +1,69 @@ +package interactor + +import ( + "bytes" + "context" + "io" + "testing" + + "github.com/reearth/reearth-backend/internal/infrastructure/fs" + "github.com/reearth/reearth-backend/internal/infrastructure/memory" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/asset" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" +) + +func TestAsset_Create(t *testing.T) { + ctx := context.Background() + tid := asset.NewTeamID() + aid := asset.NewID() + newID := asset.NewID + asset.NewID = func() asset.ID { return aid } + t.Cleanup(func() { asset.NewID = newID }) + + mfs := afero.NewMemMapFs() + f, _ := fs.NewFile(mfs, "") + repos := memory.New() + transaction := memory.NewTransaction() + 
repos.Transaction = transaction + uc := &Asset{ + repos: repos, + gateways: &gateway.Container{ + File: f, + }, + } + buf := bytes.NewBufferString("Hello") + buflen := int64(buf.Len()) + res, err := uc.Create(ctx, interfaces.CreateAssetParam{ + TeamID: tid, + File: &file.File{ + Content: io.NopCloser(buf), + Path: "hoge.txt", + ContentType: "", + Size: buflen, + }, + }, &usecase.Operator{ + WritableTeams: id.TeamIDList{tid}, + }) + + want := asset.New(). + ID(aid). + Team(tid). + URL(res.URL()). + CreatedAt(aid.Timestamp()). + Name("hoge.txt"). + Size(buflen). + ContentType(""). + MustBuild() + + assert.NoError(t, err) + assert.Equal(t, want, res) + assert.Equal(t, 1, transaction.Committed()) + a, _ := repos.Asset.FindByID(ctx, aid) + assert.Equal(t, want, a) +} diff --git a/server/internal/usecase/interactor/auth.go b/server/internal/usecase/interactor/auth.go new file mode 100644 index 000000000..1f01ebb65 --- /dev/null +++ b/server/internal/usecase/interactor/auth.go @@ -0,0 +1,414 @@ +package interactor + +import ( + "context" + "crypto/rand" + "crypto/rsa" + "crypto/x509" + "crypto/x509/pkix" + "encoding/pem" + "errors" + "fmt" + "math/big" + "time" + + "github.com/caos/oidc/pkg/oidc" + "github.com/caos/oidc/pkg/op" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/auth" + config2 "github.com/reearth/reearth-backend/pkg/config" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/user" + "gopkg.in/square/go-jose.v2" +) + +type AuthStorage struct { + appConfig *StorageConfig + getUserBySubject func(context.Context, string) (*user.User, error) + clients map[string]op.Client + requests repo.AuthRequest + keySet jose.JSONWebKeySet + key *rsa.PrivateKey + sigKey jose.SigningKey +} + +type StorageConfig struct { + Domain string `default:"http://localhost:8080"` + ClientDomain string `default:"http://localhost:8080"` + Debug bool + DN 
*AuthDNConfig
+}
+
+type AuthDNConfig struct {
+	CommonName         string
+	Organization       []string
+	OrganizationalUnit []string
+	Country            []string
+	Province           []string
+	Locality           []string
+	StreetAddress      []string
+	PostalCode         []string
+}
+
+var dummyName = pkix.Name{
+	CommonName:         "Dummy company, INC.",
+	Organization:       []string{"Dummy company, INC."},
+	OrganizationalUnit: []string{"Dummy OU"},
+	Country:            []string{"US"},
+	Province:           []string{"Dummy"},
+	Locality:           []string{"Dummy locality"},
+	StreetAddress:      []string{"Dummy street"},
+	PostalCode:         []string{"1"},
+}
+
+func NewAuthStorage(ctx context.Context, cfg *StorageConfig, request repo.AuthRequest, config repo.Config, getUserBySubject func(context.Context, string) (*user.User, error)) (op.Storage, error) {
+	client := auth.NewLocalClient(cfg.Debug, cfg.ClientDomain)
+
+	name := dummyName
+	if cfg.DN != nil {
+		name = pkix.Name{
+			CommonName:         cfg.DN.CommonName,
+			Organization:       cfg.DN.Organization,
+			OrganizationalUnit: cfg.DN.OrganizationalUnit,
+			Country:            cfg.DN.Country,
+			Province:           cfg.DN.Province,
+			Locality:           cfg.DN.Locality,
+			StreetAddress:      cfg.DN.StreetAddress,
+			PostalCode:         cfg.DN.PostalCode,
+		}
+	}
+	c, err := config.LockAndLoad(ctx)
+	if err != nil {
+		return nil, fmt.Errorf("could not load auth config: %w", err)
+	}
+	defer func() {
+		if err := config.Unlock(ctx); err != nil {
+			log.Errorf("auth: could not release config lock: %s", err)
+		}
+	}()
+
+	var keyBytes, certBytes []byte
+	if c.Auth != nil {
+		keyBytes = []byte(c.Auth.Key)
+		certBytes = []byte(c.Auth.Cert)
+	} else {
+		keyBytes, certBytes, err = generateCert(name)
+		if err != nil {
+			return nil, fmt.Errorf("could not generate raw cert: %w", err)
+		}
+		c.Auth = &config2.Auth{
+			Key:  string(keyBytes),
+			Cert: string(certBytes),
+		}
+
+		if err := config.Save(ctx, c); err != nil {
+			return nil, fmt.Errorf("could not save raw cert: %w", err)
+		}
+		log.Info("auth: init a new private key and certificate")
+	}
+
+	key, sigKey, keySet, err :=
initKeys(keyBytes, certBytes)
	if err != nil {
		return nil, fmt.Errorf("could not init keys: %w", err)
	}

	return &AuthStorage{
		appConfig:        cfg,
		getUserBySubject: getUserBySubject,
		requests:         request,
		key:              key,
		sigKey:           *sigKey,
		keySet:           *keySet,
		clients: map[string]op.Client{
			client.GetID(): client,
		},
	}, nil
}

// initKeys parses a PEM-encoded RSA private key and an X.509 certificate and
// builds the provider's signing key plus the public JWK set exposed to
// clients. Both JWKs share the fixed key ID "RE01".
func initKeys(keyBytes, certBytes []byte) (*rsa.PrivateKey, *jose.SigningKey, *jose.JSONWebKeySet, error) {
	keyBlock, _ := pem.Decode(keyBytes)
	if keyBlock == nil {
		return nil, nil, nil, errors.New("failed to decode the key bytes")
	}
	key, err := x509.ParsePKCS1PrivateKey(keyBlock.Bytes)
	if err != nil {
		return nil, nil, nil, fmt.Errorf("failed to parse the private key bytes: %w", err)
	}

	// Older saved configs stored the certificate as raw DER without PEM
	// armor, so fall back to the raw bytes when PEM decoding fails.
	certDER := certBytes
	if certBlock, _ := pem.Decode(certBytes); certBlock != nil {
		certDER = certBlock.Bytes
	}

	cert, err := x509.ParseCertificate(certDER)
	if err != nil {
		return nil, nil, nil, fmt.Errorf("failed to parse the cert bytes: %w", err)
	}

	keyID := "RE01"
	sk := jose.SigningKey{
		Algorithm: jose.RS256,
		Key:       jose.JSONWebKey{Key: key, Use: "sig", Algorithm: string(jose.RS256), KeyID: keyID, Certificates: []*x509.Certificate{cert}},
	}

	return key, &sk, &jose.JSONWebKeySet{
		Keys: []jose.JSONWebKey{
			{Key: key.Public(), Use: "sig", Algorithm: string(jose.RS256), KeyID: keyID, Certificates: []*x509.Certificate{cert}},
		},
	}, nil
}

// generateCert creates a fresh 2048-bit RSA key and a self-signed CA
// certificate (valid for 100 years) for the given subject, returning both as
// PEM blocks. On error the returned PEM slices must not be used.
func generateCert(name pkix.Name) (keyPem, certPem []byte, err error) {
	key, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		err = fmt.Errorf("failed to generate key: %w", err)
		return
	}

	keyPem = pem.EncodeToMemory(&pem.Block{
		Type:  "RSA PRIVATE KEY",
		Bytes: x509.MarshalPKCS1PrivateKey(key),
	})

	cert := &x509.Certificate{
		SerialNumber: big.NewInt(1),
		Subject:      name,
		NotBefore:    time.Now(),
		NotAfter:     time.Now().AddDate(100, 0, 0),
		IsCA:         true,
		KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign | x509.KeyUsageCRLSign,
	}

	certBytes, err := x509.CreateCertificate(rand.Reader, cert, cert, key.Public(), key)
	if err != nil {
		// BUG FIX: previously execution fell through on error and
		// PEM-encoded a nil certificate while still returning err.
		err = fmt.Errorf("failed to create the cert: %w", err)
		return
	}

	certPem = pem.EncodeToMemory(&pem.Block{
		Type:  "CERTIFICATE",
		Bytes: certBytes,
	})
	return
}

// Health reports the storage as always healthy; there is no backing
// connection to probe here.
func (s *AuthStorage) Health(_ context.Context) error {
	return nil
}

// CreateAuthRequest persists a new OIDC authorization request built from the
// incoming request parameters. The audience always contains the configured
// domain, plus localhost:8080 in debug mode.
func (s *AuthStorage) CreateAuthRequest(ctx context.Context, authReq *oidc.AuthRequest, _ string) (op.AuthRequest, error) {
	audiences := []string{
		s.appConfig.Domain,
	}
	if s.appConfig.Debug {
		audiences = append(audiences, "http://localhost:8080")
	}

	// PKCE is optional; only carry the challenge through when one was sent.
	var cc *oidc.CodeChallenge
	if authReq.CodeChallenge != "" {
		cc = &oidc.CodeChallenge{
			Challenge: authReq.CodeChallenge,
			Method:    authReq.CodeChallengeMethod,
		}
	}
	var request = auth.NewRequest().
		NewID().
		ClientID(authReq.ClientID).
		State(authReq.State).
		ResponseType(authReq.ResponseType).
		Scopes(authReq.Scopes).
		Audiences(audiences).
		RedirectURI(authReq.RedirectURI).
		Nonce(authReq.Nonce).
		CodeChallenge(cc).
		CreatedAt(time.Now().UTC()).
		AuthorizedAt(nil).
+ MustBuild() + + if err := s.requests.Save(ctx, request); err != nil { + return nil, err + } + return request, nil +} + +func (s *AuthStorage) AuthRequestByID(ctx context.Context, requestID string) (op.AuthRequest, error) { + if requestID == "" { + return nil, errors.New("invalid id") + } + reqId, err := id.AuthRequestIDFrom(requestID) + if err != nil { + return nil, err + } + request, err := s.requests.FindByID(ctx, reqId) + if err != nil { + return nil, err + } + return request, nil +} + +func (s *AuthStorage) AuthRequestByCode(ctx context.Context, code string) (op.AuthRequest, error) { + if code == "" { + return nil, errors.New("invalid code") + } + return s.requests.FindByCode(ctx, code) +} + +func (s *AuthStorage) AuthRequestBySubject(ctx context.Context, subject string) (op.AuthRequest, error) { + if subject == "" { + return nil, errors.New("invalid subject") + } + + return s.requests.FindBySubject(ctx, subject) +} + +func (s *AuthStorage) SaveAuthCode(ctx context.Context, requestID, code string) error { + request, err := s.AuthRequestByID(ctx, requestID) + if err != nil { + return err + } + request2 := request.(*auth.Request) + request2.SetCode(code) + err = s.updateRequest(ctx, requestID, *request2) + return err +} + +func (s *AuthStorage) DeleteAuthRequest(_ context.Context, requestID string) error { + delete(s.clients, requestID) + return nil +} + +func (s *AuthStorage) CreateAccessToken(_ context.Context, _ op.TokenRequest) (string, time.Time, error) { + return "id", time.Now().UTC().Add(5 * time.Hour), nil +} + +func (s *AuthStorage) CreateAccessAndRefreshTokens(_ context.Context, request op.TokenRequest, _ string) (accessTokenID string, newRefreshToken string, expiration time.Time, err error) { + authReq := request.(*auth.Request) + return "id", authReq.GetID(), time.Now().UTC().Add(5 * time.Minute), nil +} + +func (s *AuthStorage) TokenRequestByRefreshToken(ctx context.Context, refreshToken string) (op.RefreshTokenRequest, error) { + r, err := 
s.AuthRequestByID(ctx, refreshToken) + if err != nil { + return nil, err + } + return r.(op.RefreshTokenRequest), err +} + +func (s *AuthStorage) TerminateSession(_ context.Context, _, _ string) error { + return errors.New("not implemented") +} + +func (s *AuthStorage) GetSigningKey(_ context.Context, keyCh chan<- jose.SigningKey) { + keyCh <- s.sigKey +} + +func (s *AuthStorage) GetKeySet(_ context.Context) (*jose.JSONWebKeySet, error) { + return &s.keySet, nil +} + +func (s *AuthStorage) GetKeyByIDAndUserID(_ context.Context, kid, _ string) (*jose.JSONWebKey, error) { + return &s.keySet.Key(kid)[0], nil +} + +func (s *AuthStorage) GetClientByClientID(_ context.Context, clientID string) (op.Client, error) { + + if clientID == "" { + return nil, errors.New("invalid client id") + } + + client, exists := s.clients[clientID] + if !exists { + return nil, errors.New("not found") + } + + return client, nil +} + +func (s *AuthStorage) AuthorizeClientIDSecret(_ context.Context, _ string, _ string) error { + return nil +} + +func (s *AuthStorage) SetUserinfoFromToken(ctx context.Context, userinfo oidc.UserInfoSetter, _, _, _ string) error { + return s.SetUserinfoFromScopes(ctx, userinfo, "", "", []string{}) +} + +func (s *AuthStorage) SetUserinfoFromScopes(ctx context.Context, userinfo oidc.UserInfoSetter, subject, _ string, _ []string) error { + + request, err := s.AuthRequestBySubject(ctx, subject) + if err != nil { + return err + } + + u, err := s.getUserBySubject(ctx, subject) + if err != nil { + return err + } + + userinfo.SetSubject(request.GetSubject()) + userinfo.SetEmail(u.Email(), true) + userinfo.SetName(u.Name()) + userinfo.AppendClaims("lang", u.Lang()) + userinfo.AppendClaims("theme", u.Theme()) + + return nil +} + +func (s *AuthStorage) GetPrivateClaimsFromScopes(_ context.Context, _, _ string, _ []string) (map[string]interface{}, error) { + return map[string]interface{}{"private_claim": "test"}, nil +} + +func (s *AuthStorage) SetIntrospectionFromToken(ctx 
context.Context, introspect oidc.IntrospectionResponse, _, subject, clientID string) error { + if err := s.SetUserinfoFromScopes(ctx, introspect, subject, clientID, []string{}); err != nil { + return err + } + request, err := s.AuthRequestBySubject(ctx, subject) + if err != nil { + return err + } + introspect.SetClientID(request.GetClientID()) + return nil +} + +func (s *AuthStorage) ValidateJWTProfileScopes(_ context.Context, _ string, scope []string) ([]string, error) { + return scope, nil +} + +func (s *AuthStorage) RevokeToken(_ context.Context, _ string, _ string, _ string) *oidc.Error { + // TODO implement me + panic("implement me") +} + +func (s *AuthStorage) CompleteAuthRequest(ctx context.Context, requestId, sub string) error { + request, err := s.AuthRequestByID(ctx, requestId) + if err != nil { + return err + } + req := request.(*auth.Request) + req.Complete(sub) + err = s.updateRequest(ctx, requestId, *req) + return err +} + +func (s *AuthStorage) updateRequest(ctx context.Context, requestID string, req auth.Request) error { + if requestID == "" { + return errors.New("invalid id") + } + reqId, err := id.AuthRequestIDFrom(requestID) + if err != nil { + return err + } + + if _, err := s.requests.FindByID(ctx, reqId); err != nil { + return err + } + + if err := s.requests.Save(ctx, &req); err != nil { + return err + } + + return nil +} diff --git a/server/internal/usecase/interactor/common.go b/server/internal/usecase/interactor/common.go new file mode 100644 index 000000000..10abb9960 --- /dev/null +++ b/server/internal/usecase/interactor/common.go @@ -0,0 +1,229 @@ +package interactor + +import ( + "context" + "errors" + "net/url" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + 
"github.com/reearth/reearth-backend/pkg/project" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type ContainerConfig struct { + SignupSecret string + AuthSrvUIDomain string + PublishedIndexHTML string + PublishedIndexURL *url.URL +} + +func NewContainer(r *repo.Container, g *gateway.Container, config ContainerConfig) interfaces.Container { + var published interfaces.Published + if config.PublishedIndexURL != nil && config.PublishedIndexURL.String() != "" { + published = NewPublishedWithURL(r.Project, g.File, config.PublishedIndexURL) + } else { + published = NewPublished(r.Project, g.File, config.PublishedIndexHTML) + } + + return interfaces.Container{ + Asset: NewAsset(r, g), + Dataset: NewDataset(r, g), + Layer: NewLayer(r), + Plugin: NewPlugin(r, g), + Project: NewProject(r, g), + Property: NewProperty(r, g), + Published: published, + Scene: NewScene(r, g), + Tag: NewTag(r), + Team: NewTeam(r), + User: NewUser(r, g, config.SignupSecret, config.AuthSrvUIDomain), + } +} + +// Deprecated: common will be deprecated. Please use the Usecase function instead. 
+type common struct{} + +func (common) OnlyOperator(op *usecase.Operator) error { + if op == nil { + return interfaces.ErrOperationDenied + } + return nil +} + +func (i common) CanReadTeam(t id.TeamID, op *usecase.Operator) error { + if err := i.OnlyOperator(op); err != nil { + return err + } + if !op.IsReadableTeam(t) { + return interfaces.ErrOperationDenied + } + return nil +} + +func (i common) CanWriteTeam(t id.TeamID, op *usecase.Operator) error { + if err := i.OnlyOperator(op); err != nil { + return err + } + if !op.IsWritableTeam(t) { + return interfaces.ErrOperationDenied + } + return nil +} + +func (i common) CanReadScene(t id.SceneID, op *usecase.Operator) error { + if err := i.OnlyOperator(op); err != nil { + return err + } + if !op.IsReadableScene(t) { + return interfaces.ErrOperationDenied + } + return nil +} + +func (i common) CanWriteScene(t id.SceneID, op *usecase.Operator) error { + if err := i.OnlyOperator(op); err != nil { + return err + } + if !op.IsWritableScene(t) { + return interfaces.ErrOperationDenied + } + return nil +} + +type commonSceneLock struct { + sceneLockRepo repo.SceneLock +} + +func (i commonSceneLock) CheckSceneLock(ctx context.Context, s id.SceneID) error { + // check scene lock + if lock, err := i.sceneLockRepo.GetLock(ctx, s); err != nil { + return err + } else if lock.IsLocked() { + return interfaces.ErrSceneIsLocked + } + return nil +} + +func (i commonSceneLock) UpdateSceneLock(ctx context.Context, s id.SceneID, before, after scene.LockMode) error { + // get lock + lm, err := i.sceneLockRepo.GetLock(ctx, s) + if err != nil { + return err + } + + // check lock + if lm != before { + return scene.ErrSceneIsLocked + } + + // change lock + err = i.sceneLockRepo.SaveLock(ctx, s, after) + if err != nil { + return err + } + return nil +} + +func (i commonSceneLock) ReleaseSceneLock(ctx context.Context, s id.SceneID) { + _ = i.sceneLockRepo.SaveLock(ctx, s, scene.LockModeFree) +} + +type SceneDeleter struct { + Scene repo.Scene + 
SceneLock repo.SceneLock + Layer repo.Layer + Property repo.Property + Dataset repo.Dataset + DatasetSchema repo.DatasetSchema +} + +func (d SceneDeleter) Delete(ctx context.Context, s *scene.Scene, force bool) error { + if s == nil { + return nil + } + + if force { + lock, err := d.SceneLock.GetLock(ctx, s.ID()) + if err != nil { + return err + } + + if lock != scene.LockModeFree { + return scene.ErrSceneIsLocked + } + } + + // Delete layer + if err := d.Layer.RemoveByScene(ctx, s.ID()); err != nil { + return err + } + + // Delete property + if err := d.Property.RemoveByScene(ctx, s.ID()); err != nil { + return err + } + + // Delete dataset + if err := d.Dataset.RemoveByScene(ctx, s.ID()); err != nil { + return err + } + + // Delete dataset schema + if err := d.DatasetSchema.RemoveByScene(ctx, s.ID()); err != nil { + return err + } + + // Release scene lock + if err := d.SceneLock.SaveLock(ctx, s.ID(), scene.LockModeFree); err != nil { + return err + } + + // Delete scene + if err := d.Scene.Remove(ctx, s.ID()); err != nil { + return err + } + + return nil +} + +type ProjectDeleter struct { + SceneDeleter + File gateway.File + Project repo.Project +} + +func (d ProjectDeleter) Delete(ctx context.Context, prj *project.Project, force bool, operator *usecase.Operator) error { + if prj == nil { + return nil + } + + // Fetch scene + s, err := d.Scene.FindByProject(ctx, prj.ID()) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return err + } + + // Delete scene + if err := d.SceneDeleter.Delete(ctx, s, force); err != nil { + return err + } + + // Unpublish project + if prj.PublishmentStatus() != project.PublishmentStatusPrivate { + if err := d.File.RemoveBuiltScene(ctx, prj.Alias()); err != nil { + return err + } + } + + // Delete project + if err := d.Project.Remove(ctx, prj.ID()); err != nil { + return err + } + + return nil +} diff --git a/server/internal/usecase/interactor/dataset.go b/server/internal/usecase/interactor/dataset.go new file mode 100644 
index 000000000..46f4a5521 --- /dev/null +++ b/server/internal/usecase/interactor/dataset.go @@ -0,0 +1,633 @@ +package interactor + +import ( + "context" + "errors" + "io" + "strings" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/layer/layerops" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/scene/sceneops" +) + +var extensionForLinkedLayers = id.PluginExtensionID("marker") + +type Dataset struct { + common + commonSceneLock + datasetRepo repo.Dataset + datasetSchemaRepo repo.DatasetSchema + propertyRepo repo.Property + layerRepo repo.Layer + pluginRepo repo.Plugin + transaction repo.Transaction + datasource gateway.DataSource + file gateway.File + google gateway.Google +} + +func NewDataset(r *repo.Container, gr *gateway.Container) interfaces.Dataset { + return &Dataset{ + commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, + datasetRepo: r.Dataset, + datasetSchemaRepo: r.DatasetSchema, + propertyRepo: r.Property, + layerRepo: r.Layer, + pluginRepo: r.Plugin, + transaction: r.Transaction, + datasource: gr.DataSource, + file: gr.File, + google: gr.Google, + } +} + +func (i *Dataset) DynamicSchemaFields() []*dataset.SchemaField { + author, _ := dataset.NewSchemaField().NewID().Name("author").Type(dataset.ValueTypeString).Build() + content, _ := dataset.NewSchemaField().NewID().Name("content").Type(dataset.ValueTypeString).Build() 
+ location, _ := dataset.NewSchemaField().NewID().Name("location").Type(dataset.ValueTypeLatLng).Build() + target, _ := dataset.NewSchemaField().NewID().Name("target").Type(dataset.ValueTypeString).Build() + return []*dataset.SchemaField{author, content, location, target} +} + +func (i *Dataset) UpdateDatasetSchema(ctx context.Context, inp interfaces.UpdateDatasetSchemaParam, operator *usecase.Operator) (_ *dataset.Schema, err error) { + schema, err := i.datasetSchemaRepo.FindByID(ctx, inp.SchemaId) + if err != nil { + return nil, err + } + + // Begin Db transaction + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + schema.Rename(inp.Name) + err = i.datasetSchemaRepo.Save(ctx, schema) + if err != nil { + return nil, err + } + + // Commit db transaction + tx.Commit() + return schema, nil +} + +func (i *Dataset) AddDynamicDatasetSchema(ctx context.Context, inp interfaces.AddDynamicDatasetSchemaParam) (_ *dataset.Schema, err error) { + // Begin Db transaction + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + schemaBuilder := dataset.NewSchema(). + NewID(). + Scene(inp.SceneId). 
+ Dynamic(true) + fields := i.DynamicSchemaFields() + schemaBuilder = schemaBuilder.Fields(fields) + ds, err := schemaBuilder.Build() + if err != nil { + return nil, err + } + err = i.datasetSchemaRepo.Save(ctx, ds) + if err != nil { + return nil, err + } + + // Commit db transaction + tx.Commit() + return ds, nil +} + +func (i *Dataset) AddDynamicDataset(ctx context.Context, inp interfaces.AddDynamicDatasetParam) (_ *dataset.Schema, _ *dataset.Dataset, err error) { + // Begin Db transaction + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + fields := []*dataset.Field{} + dss, err := i.datasetSchemaRepo.FindDynamicByID(ctx, inp.SchemaId) + if err != nil { + return nil, nil, err + } + for _, f := range dss.Fields() { + if f.Name() == "author" { + fields = append(fields, dataset.NewField(f.ID(), dataset.ValueTypeString.ValueFrom(inp.Author), "")) + } + if f.Name() == "content" { + fields = append(fields, dataset.NewField(f.ID(), dataset.ValueTypeString.ValueFrom(inp.Content), "")) + } + if inp.Target != nil && len(*inp.Target) > 0 && f.Name() == "target" { + fields = append(fields, dataset.NewField(f.ID(), dataset.ValueTypeString.ValueFrom(inp.Target), "")) + } + if inp.Lat != nil && inp.Lng != nil && f.Name() == "location" { + latlng := dataset.LatLng{Lat: *inp.Lat, Lng: *inp.Lng} + fields = append(fields, dataset.NewField(f.ID(), dataset.ValueTypeLatLng.ValueFrom(latlng), "")) + } + } + ds, err := dataset. + New(). + NewID(). + Fields(fields). + Schema(inp.SchemaId). 
+ Build() + if err != nil { + return nil, nil, err + } + err = i.datasetRepo.Save(ctx, ds) + if err != nil { + return nil, nil, err + } + + // Commit db transaction + tx.Commit() + return dss, ds, nil +} + +func (i *Dataset) ImportDataset(ctx context.Context, inp interfaces.ImportDatasetParam, operator *usecase.Operator) (_ *dataset.Schema, err error) { + if err := i.CanWriteScene(inp.SceneId, operator); err != nil { + return nil, err + } + if inp.File == nil { + return nil, interfaces.ErrFileNotIncluded + } + + separator := ',' + if strings.HasSuffix(inp.File.Path, ".tsv") { + separator = '\t' + } + + return i.importDataset(ctx, inp.File.Content, inp.File.Path, separator, inp.SceneId, inp.SchemaId) +} + +func (i *Dataset) ImportDatasetFromGoogleSheet(ctx context.Context, inp interfaces.ImportDatasetFromGoogleSheetParam, operator *usecase.Operator) (_ *dataset.Schema, err error) { + if err := i.CanWriteScene(inp.SceneId, operator); err != nil { + return nil, err + } + + csvFile, err := i.google.FetchCSV(inp.Token, inp.FileID, inp.SheetName) + if err != nil { + return nil, err + } + defer func() { + err = (*csvFile).Close() + if err != nil { + log.Fatal(err) + } + }() + + return i.importDataset(ctx, *csvFile, inp.SheetName, ',', inp.SceneId, inp.SchemaId) +} + +func (i *Dataset) importDataset(ctx context.Context, content io.Reader, name string, separator rune, sceneId id.SceneID, schemaId *id.DatasetSchemaID) (_ *dataset.Schema, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + csv := dataset.NewCSVParser(content, name, separator) + err = csv.Init() + if err != nil { + return nil, err + } + + // replacment mode + if schemaId != nil { + dss, err := i.datasetSchemaRepo.FindByID(ctx, *schemaId) + if err != nil { + return nil, err + } + err = csv.CheckCompatible(dss) + if err != nil { + return nil, err + } + toreplace, err := 
i.datasetRepo.FindBySchemaAll(ctx, *schemaId) + if err != nil { + return nil, err + } + err = i.datasetRepo.RemoveAll(ctx, toreplace.ToDatasetIds()) + if err != nil { + return nil, err + } + } else { + err = csv.GuessSchema(sceneId) + if err != nil { + return nil, err + } + } + + schema, datasets, err := csv.ReadAll() + if err != nil { + return nil, err + } + + err = i.datasetSchemaRepo.Save(ctx, schema) + if err != nil { + return nil, err + } + err = i.datasetRepo.SaveAll(ctx, datasets) + if err != nil { + return nil, err + } + + if schemaId != nil { + layergroups, err := i.layerRepo.FindGroupBySceneAndLinkedDatasetSchema(ctx, sceneId, *schemaId) + if err != nil { + return nil, err + } + + newProperties := make([]*property.Property, 0, len(datasets)) + representativeFieldID := schema.RepresentativeFieldID() + removedProperties := []id.PropertyID{} + removedLayers := []id.LayerID{} + updatedLayers := append(layer.List{}, layergroups.ToLayerList()...) + + for _, lg := range layergroups { + if lg.Layers().LayerCount() > 0 { + children, err := i.layerRepo.FindByIDs(ctx, lg.Layers().Layers()) + if err != nil { + return nil, err + } + for _, c := range children { + if c != nil { + removedProperties = append(removedProperties, (*c).Properties()...) + } + } + removedLayers = append(removedLayers, lg.Layers().Layers()...) 
+ lg.Layers().Empty() + } + + for _, ds := range datasets { + dsid := ds.ID() + name := "" + if rf := ds.FieldRef(representativeFieldID); rf != nil && rf.Type() == dataset.ValueTypeString { + name = rf.Value().Value().(string) + } + layerItem, layerProperty, err := layerops.LayerItem{ + SceneID: sceneId, + ParentLayerID: lg.ID(), + Plugin: builtin.Plugin(), + ExtensionID: &extensionForLinkedLayers, + LinkedDatasetID: &dsid, + Name: name, + }.Initialize() + if err != nil { + return nil, err + } + if layerItem != nil { + lg.Layers().AddLayer(layerItem.ID(), -1) + updatedLayers = append(updatedLayers, layerItem.LayerRef()) + } + if layerProperty != nil { + newProperties = append(newProperties, layerProperty) + } + } + } + + err = i.layerRepo.RemoveAll(ctx, removedLayers) + if err != nil { + return nil, err + } + err = i.propertyRepo.RemoveAll(ctx, removedProperties) + if err != nil { + return nil, err + } + err = i.layerRepo.SaveAll(ctx, updatedLayers) + if err != nil { + return nil, err + } + err = i.propertyRepo.SaveAll(ctx, newProperties) + if err != nil { + return nil, err + } + } + + // Commit db transaction + tx.Commit() + return schema, nil +} + +func (i *Dataset) Fetch(ctx context.Context, ids []id.DatasetID, operator *usecase.Operator) (dataset.List, error) { + return i.datasetRepo.FindByIDs(ctx, ids) +} + +func (i *Dataset) GraphFetch(ctx context.Context, id id.DatasetID, depth int, operator *usecase.Operator) (dataset.List, error) { + if depth < 0 || depth > 3 { + return nil, interfaces.ErrDatasetInvalidDepth + } + it := dataset.GraphIteratorFrom(id, depth) + res := dataset.List{} + next := id + done := false + for { + d, err := i.datasetRepo.FindByID(ctx, next) + if err != nil { + return nil, err + } + res = append(res, d) + next, done = it.Next(d) + if next.IsNil() { + return nil, rerror.ErrInternalBy(errors.New("next id is nil")) + } + if done { + break + } + } + return res, nil +} + +func (i *Dataset) FetchSchema(ctx context.Context, ids 
[]id.DatasetSchemaID, operator *usecase.Operator) (dataset.SchemaList, error) { + return i.datasetSchemaRepo.FindByIDs(ctx, ids) +} + +func (i *Dataset) GraphFetchSchema(ctx context.Context, id id.DatasetSchemaID, depth int, operator *usecase.Operator) (dataset.SchemaList, error) { + if depth < 0 || depth > 3 { + return nil, interfaces.ErrDatasetInvalidDepth + } + + it := dataset.SchemaGraphIteratorFrom(id, depth) + res := dataset.SchemaList{} + next := id + done := false + for { + d, err := i.datasetSchemaRepo.FindByID(ctx, next) + if err != nil { + return nil, err + } + res = append(res, d) + next, done = it.Next(d) + if next.IsNil() { + return nil, rerror.ErrInternalBy(errors.New("next id is nil")) + } + if done { + break + } + } + + return res, nil +} + +func (i *Dataset) FindBySchema(ctx context.Context, ds id.DatasetSchemaID, p *usecase.Pagination, operator *usecase.Operator) (dataset.List, *usecase.PageInfo, error) { + return i.datasetRepo.FindBySchema(ctx, ds, p) +} + +func (i *Dataset) CountBySchema(ctx context.Context, id id.DatasetSchemaID) (int, error) { + return i.datasetRepo.CountBySchema(ctx, id) +} + +func (i *Dataset) FindSchemaByScene(ctx context.Context, sid id.SceneID, p *usecase.Pagination, operator *usecase.Operator) (dataset.SchemaList, *usecase.PageInfo, error) { + if err := i.CanReadScene(sid, operator); err != nil { + return nil, nil, err + } + + return i.datasetSchemaRepo.FindByScene(ctx, sid, p) +} + +func (i *Dataset) FindDynamicSchemaByScene(ctx context.Context, sid id.SceneID) (dataset.SchemaList, error) { + return i.datasetSchemaRepo.FindAllDynamicByScene(ctx, sid) +} + +func (i *Dataset) Sync(ctx context.Context, sceneID id.SceneID, url string, operator *usecase.Operator) (dss dataset.SchemaList, ds dataset.List, err error) { + if err := i.CanWriteScene(sceneID, operator); err != nil { + return nil, nil, err + } + + if i.datasource == nil { + return nil, nil, interfaces.ErrNoDataSourceAvailable + } + + // Check URL + if 
!i.datasource.IsURLValid(ctx, url) { + return nil, nil, interfaces.ErrDataSourceInvalidURL + } + + // Begin Db transaction + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + if err := i.UpdateSceneLock(ctx, sceneID, scene.LockModeFree, scene.LockModeDatasetSyncing); err != nil { + return nil, nil, err + } + + defer i.ReleaseSceneLock(ctx, sceneID) + + // Fetch + dss, ds, err = i.datasource.Fetch(ctx, url, sceneID) + if err != nil { + return nil, nil, err + } + + // Save + if err := i.datasetSchemaRepo.SaveAll(ctx, dss); err != nil { + return nil, nil, err + } + if err := i.datasetRepo.SaveAll(ctx, ds); err != nil { + return nil, nil, err + } + + // Migrate + result, err := sceneops.DatasetMigrator{ + PropertyRepo: i.propertyRepo, + LayerRepo: i.layerRepo, + DatasetSchemaRepo: i.datasetSchemaRepo, + DatasetRepo: i.datasetRepo, + Plugin: repo.PluginLoaderFrom(i.pluginRepo), + }.Migrate(ctx, sceneID, dss, ds) + if err != nil { + return nil, nil, err + } + + if err := i.propertyRepo.SaveAll(ctx, result.Properties.List()); err != nil { + return nil, nil, err + } + if err := i.layerRepo.SaveAll(ctx, result.Layers.List()); err != nil { + return nil, nil, err + } + if err := i.layerRepo.RemoveAll(ctx, result.RemovedLayers.List()); err != nil { + return nil, nil, err + } + if err := i.datasetRepo.RemoveAll(ctx, result.RemovedDatasets); err != nil { + return nil, nil, err + } + if err := i.datasetSchemaRepo.RemoveAll(ctx, result.RemovedDatasetSchemas); err != nil { + return nil, nil, err + } + + tx.Commit() + return dss, ds, nil +} + +func (i *Dataset) AddDatasetSchema(ctx context.Context, inp interfaces.AddDatasetSchemaParam, operator *usecase.Operator) (ds *dataset.Schema, err error) { + if err := i.CanWriteScene(inp.SceneId, operator); err != nil { + return nil, err + } + + // Begin Db transaction + tx, err := i.transaction.Begin() + if err != nil { + return + 
} + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + schemaBuilder := dataset.NewSchema(). + NewID(). + Scene(inp.SceneId). + Name(inp.Name). + Source("reearth"). + RepresentativeField(*inp.RepresentativeField) + ds, err = schemaBuilder.Build() + if err != nil { + return nil, err + } + err = i.datasetSchemaRepo.Save(ctx, ds) + if err != nil { + return nil, err + } + + tx.Commit() + return ds, nil +} + +func (i *Dataset) RemoveDatasetSchema(ctx context.Context, inp interfaces.RemoveDatasetSchemaParam, operator *usecase.Operator) (_ id.DatasetSchemaID, err error) { + s, err := i.datasetSchemaRepo.FindByID(ctx, inp.SchemaID) + if err != nil { + return inp.SchemaID, err + } + if s == nil { + return inp.SchemaID, rerror.ErrNotFound + } + if err := i.CanWriteScene(s.Scene(), operator); err != nil { + return inp.SchemaID, err + } + + datasets, err := i.datasetRepo.FindBySchemaAll(ctx, inp.SchemaID) + if err != nil { + return inp.SchemaID, err + } + if (inp.Force == nil || !*inp.Force) && len(datasets) != 0 { + return inp.SchemaID, errors.New("can not remove non-empty schema") + } + + // Begin Db transaction + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + // list of datasets attached by the schema + dsids := []id.DatasetID{} + var properties []*property.Property + for _, d := range datasets { + properties, err = i.propertyRepo.FindByDataset(ctx, inp.SchemaID, d.ID()) + if err != nil { + return inp.SchemaID, err + } + + for _, p := range properties { + // unlinking fields + p.UnlinkAllByDataset(inp.SchemaID, d.ID()) + } + + dsids = append(dsids, d.ID()) + } + + // unlink layers (items and groups) and save + layers, err := i.layerRepo.FindAllByDatasetSchema(ctx, inp.SchemaID) + if err != nil { + return inp.SchemaID, err + } + + for _, li := range layers.ToLayerItemList() { + li.Unlink() + } + + for _, lg := range 
layers.ToLayerGroupList() { + lg.Unlink() + + groupItems, err := i.layerRepo.FindItemByIDs(ctx, lg.Layers().Layers()) + if err != nil { + return inp.SchemaID, err + } + + // unlink layers group items + for _, item := range groupItems { + item.Unlink() + } + + // save the changed layers + layers = append(layers, groupItems.ToLayerList()...) + } + + err = i.propertyRepo.SaveAll(ctx, properties) + if err != nil { + return inp.SchemaID, err + } + + err = i.layerRepo.SaveAll(ctx, layers) + if err != nil { + return inp.SchemaID, err + } + + err = i.datasetRepo.RemoveAll(ctx, dsids) + if err != nil { + return inp.SchemaID, err + } + + err = i.datasetSchemaRepo.Remove(ctx, inp.SchemaID) + if err != nil { + return inp.SchemaID, err + } + + tx.Commit() + return inp.SchemaID, nil +} diff --git a/server/internal/usecase/interactor/emails/auth_html.tmpl b/server/internal/usecase/interactor/emails/auth_html.tmpl new file mode 100644 index 000000000..9d1d1e3ac --- /dev/null +++ b/server/internal/usecase/interactor/emails/auth_html.tmpl @@ -0,0 +1,435 @@ + + + + + + + Re:Earth reset password + + + + + + + + + + + + + \ No newline at end of file diff --git a/server/internal/usecase/interactor/emails/auth_text.tmpl b/server/internal/usecase/interactor/emails/auth_text.tmpl new file mode 100644 index 000000000..0ed590d5b --- /dev/null +++ b/server/internal/usecase/interactor/emails/auth_text.tmpl @@ -0,0 +1,7 @@ +Hi {{ .UserName }}: +{{ .Message }} + +To {{ .ActionLabel }}: +{{ .ActionURL }} + +{{ .Suffix }} \ No newline at end of file diff --git a/server/internal/usecase/interactor/layer.go b/server/internal/usecase/interactor/layer.go new file mode 100644 index 000000000..1424a1440 --- /dev/null +++ b/server/internal/usecase/interactor/layer.go @@ -0,0 +1,1060 @@ +package interactor + +import ( + "context" + "encoding/json" + "encoding/xml" + "errors" + "io" + "strings" + + "github.com/reearth/reearth-backend/internal/usecase" + 
"github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/decoding" + "github.com/reearth/reearth-backend/pkg/layer/encoding" + "github.com/reearth/reearth-backend/pkg/layer/layerops" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/shp" + "github.com/reearth/reearth-backend/pkg/tag" +) + +// TODO: ใƒฌใ‚คใƒคใƒผไฝœๆˆใฎใƒ‰ใƒกใ‚คใƒณใƒญใ‚ธใƒƒใ‚ฏใŒใ“ใ“ใซๅคšใๆผใ‚Œๅ‡บใ—ใฆใ„ใ‚‹ใฎใงใƒ‰ใƒกใ‚คใƒณๅฑคใซ็งปใ™ + +type Layer struct { + common + commonSceneLock + layerRepo repo.Layer + tagRepo repo.Tag + pluginRepo repo.Plugin + propertyRepo repo.Property + propertySchemaRepo repo.PropertySchema + datasetRepo repo.Dataset + datasetSchemaRepo repo.DatasetSchema + sceneRepo repo.Scene + sceneLockRepo repo.SceneLock + transaction repo.Transaction +} + +func NewLayer(r *repo.Container) interfaces.Layer { + return &Layer{ + commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, + layerRepo: r.Layer, + tagRepo: r.Tag, + pluginRepo: r.Plugin, + propertyRepo: r.Property, + datasetRepo: r.Dataset, + propertySchemaRepo: r.PropertySchema, + datasetSchemaRepo: r.DatasetSchema, + sceneRepo: r.Scene, + sceneLockRepo: r.SceneLock, + transaction: r.Transaction, + } +} + +func (i *Layer) Fetch(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) (layer.List, error) { + return i.layerRepo.FindByIDs(ctx, ids) +} + +func (i *Layer) FetchGroup(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) ([]*layer.Group, error) { + return 
i.layerRepo.FindGroupByIDs(ctx, ids) +} + +func (i *Layer) FetchItem(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) ([]*layer.Item, error) { + return i.layerRepo.FindItemByIDs(ctx, ids) +} + +func (i *Layer) FetchParent(ctx context.Context, pid id.LayerID, operator *usecase.Operator) (*layer.Group, error) { + return i.layerRepo.FindParentByID(ctx, pid) +} + +func (i *Layer) FetchByProperty(ctx context.Context, pid id.PropertyID, operator *usecase.Operator) (layer.Layer, error) { + return i.layerRepo.FindByProperty(ctx, pid) +} + +func (i *Layer) FetchMerged(ctx context.Context, org id.LayerID, parent *id.LayerID, operator *usecase.Operator) (*layer.Merged, error) { + ids := []id.LayerID{org} + if parent != nil { + ids = append(ids, *parent) + } + layers, err := i.layerRepo.FindByIDs(ctx, ids) + if err != nil { + return nil, err + } + layers2 := []*layer.Layer(layers) + + var orgl *layer.Item + var parentl *layer.Group + if parent != nil && len(layers2) == 2 { + l := layers2[0] + orgl = layer.ToLayerItemRef(l) + l = layers2[1] + parentl = layer.ToLayerGroupRef(l) + } else if parent == nil && len(layers2) == 1 { + l := layers2[0] + if l != nil { + orgl = layer.ToLayerItemRef(l) + } + } + + return layer.Merge(orgl, parentl), nil +} + +func (i *Layer) FetchParentAndMerged(ctx context.Context, org id.LayerID, operator *usecase.Operator) (*layer.Merged, error) { + orgl, err := i.layerRepo.FindItemByID(ctx, org) + if err != nil { + return nil, err + } + parent, err := i.layerRepo.FindParentByID(ctx, org) + if err != nil { + return nil, err + } + + return layer.Merge(orgl, parent), nil +} + +func (i *Layer) FetchByTag(ctx context.Context, tag id.TagID, operator *usecase.Operator) (layer.List, error) { + return i.layerRepo.FindByTag(ctx, tag) +} + +func (l *Layer) Export(ctx context.Context, lid id.LayerID, ext string) (io.Reader, string, error) { + _, err := l.layerRepo.FindByID(ctx, lid) + if err != nil { + return nil, "", err + } + + reader, writer := 
io.Pipe() + e := encoding.EncoderFromExt(strings.ToLower(ext), writer) + if e == nil { + return nil, "", rerror.ErrNotFound + } + ex := &encoding.Exporter{ + Merger: &merging.Merger{ + LayerLoader: repo.LayerLoaderFrom(l.layerRepo), + PropertyLoader: repo.PropertyLoaderFrom(l.propertyRepo), + }, + Sealer: &merging.Sealer{ + DatasetGraphLoader: repo.DatasetGraphLoaderFrom(l.datasetRepo), + }, + Encoder: e, + } + + go func() { + defer func() { + _ = writer.Close() + }() + err = ex.ExportLayerByID(ctx, lid) + }() + + if err != nil { + return nil, "", err + } + return reader, e.MimeType(), nil +} + +func (i *Layer) AddItem(ctx context.Context, inp interfaces.AddLayerItemInput, operator *usecase.Operator) (_ *layer.Item, _ *layer.Group, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + parentLayer, err := i.layerRepo.FindGroupByID(ctx, inp.ParentLayerID) + if err != nil { + return nil, nil, err + } + + // check scene lock + if err := i.CheckSceneLock(ctx, parentLayer.Scene()); err != nil { + return nil, nil, err + } + + if parentLayer.IsLinked() { + return nil, nil, interfaces.ErrCannotAddLayerToLinkedLayerGroup + } + + var pid *id.PluginID + if inp.ExtensionID != nil { + pid = &id.OfficialPluginID + } + plugin, extension, err := i.getPlugin(ctx, parentLayer.Scene(), pid, inp.ExtensionID) + if err != nil { + return nil, nil, err + } + + propertySchema, err := i.propertySchemaRepo.FindByID(ctx, extension.Schema()) + if err != nil { + return nil, nil, err + } + + layerItem, property, err := layerops.LayerItem{ + SceneID: parentLayer.Scene(), + ParentLayerID: parentLayer.ID(), + Plugin: plugin, + ExtensionID: inp.ExtensionID, + LinkedDatasetID: inp.LinkedDatasetID, + LinkablePropertySchema: propertySchema, + LatLng: inp.LatLng, + Name: inp.Name, + Index: inp.Index, + }.Initialize() + if err != nil { + return nil, nil, err + } + + index := -1 + if 
inp.Index != nil { + index = *inp.Index + } + + parentLayer.Layers().AddLayer(layerItem.ID(), index) + + if property != nil { + err = i.propertyRepo.Save(ctx, property) + if err != nil { + return nil, nil, err + } + } + + err = i.layerRepo.Save(ctx, layerItem) + if err != nil { + return nil, nil, err + } + err = i.layerRepo.Save(ctx, parentLayer) + if err != nil { + return nil, nil, err + } + + tx.Commit() + return layerItem, parentLayer, nil +} + +func (i *Layer) AddGroup(ctx context.Context, inp interfaces.AddLayerGroupInput, operator *usecase.Operator) (_ *layer.Group, _ *layer.Group, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + parentLayer, err := i.layerRepo.FindGroupByID(ctx, inp.ParentLayerID) + if err != nil { + return nil, nil, err + } + + // check scene lock + if err := i.CheckSceneLock(ctx, parentLayer.Scene()); err != nil { + return nil, nil, err + } + + if parentLayer.IsLinked() { + return nil, nil, interfaces.ErrCannotAddLayerToLinkedLayerGroup + } + + var extensionSchemaID id.PropertySchemaID + var propertySchema *property.Schema + + var pid *id.PluginID + if inp.ExtensionID != nil { + pid = &id.OfficialPluginID + } + plug, extension, err := i.getPlugin(ctx, parentLayer.Scene(), pid, inp.ExtensionID) + if err != nil { + return nil, nil, err + } + if extension != nil { + if extension.Type() != plugin.ExtensionTypePrimitive { + return nil, nil, interfaces.ErrExtensionTypeMustBePrimitive + } + extensionSchemaID = extension.Schema() + } + + var datasetSchema *dataset.Schema + var ds dataset.List + if inp.LinkedDatasetSchemaID != nil { + datasetSchema2, err := i.datasetSchemaRepo.FindByID(ctx, *inp.LinkedDatasetSchemaID) + if err != nil { + return nil, nil, err + } + datasetSchema = datasetSchema2 + + ds, err = i.datasetRepo.FindBySchemaAll(ctx, + *inp.LinkedDatasetSchemaID, + ) + if err != nil { + return nil, nil, err + } + 
} else { + ds = []*dataset.Dataset{} + } + + var p *property.Property + builder := layer.NewGroup().NewID().Scene(parentLayer.Scene()) + if inp.Name == "" && datasetSchema != nil { + builder = builder.Name(datasetSchema.Name()) + } else { + builder = builder.Name(inp.Name) + } + if inp.ExtensionID != nil { + builder = builder.Plugin(&id.OfficialPluginID) + propertySchema, err = i.propertySchemaRepo.FindByID(ctx, extensionSchemaID) + if err != nil { + return nil, nil, err + } + + builder = builder.Extension(inp.ExtensionID) + p, err = property.New(). + NewID(). + Schema(extensionSchemaID). + Scene(parentLayer.Scene()). + Build() + if err != nil { + return nil, nil, err + } + + // auto linking + p.AutoLinkField( + propertySchema, + property.ValueTypeLatLng, + datasetSchema.ID(), + datasetSchema.FieldByType(dataset.ValueTypeLatLng).IDRef(), + nil) + p.AutoLinkField( + propertySchema, + property.ValueTypeURL, + datasetSchema.ID(), + datasetSchema.FieldByType(dataset.ValueTypeURL).IDRef(), + nil) + + builder = builder.Property(p.ID().Ref()) + } + if inp.LinkedDatasetSchemaID != nil { + builder = builder.LinkedDatasetSchema(inp.LinkedDatasetSchemaID) + } + layerGroup, err := builder.Build() + if err != nil { + return nil, nil, err + } + + // create item layers + var representativeFieldID *id.DatasetFieldID + if inp.RepresentativeFieldId != nil { + representativeFieldID = inp.RepresentativeFieldId + } else { + representativeFieldID = datasetSchema.RepresentativeFieldID() + } + + layerItems := make([]*layer.Item, 0, len(ds)) + layerItemProperties := make([]*property.Property, 0, len(ds)) + index := -1 + for _, ds := range ds { + dsid := ds.ID() + + name := "" + if rf := ds.FieldRef(representativeFieldID); rf != nil && rf.Type() == dataset.ValueTypeString { + name = rf.Value().Value().(string) + } + + layerItem, property, err := layerops.LayerItem{ + SceneID: parentLayer.Scene(), + ParentLayerID: layerGroup.ID(), + Plugin: plug, + ExtensionID: inp.ExtensionID, + Index: 
&index, + LinkedDatasetID: &dsid, + Name: name, + }.Initialize() + + if err != nil { + return nil, nil, err + } + layerItems = append(layerItems, layerItem) + layerItemProperties = append(layerItemProperties, property) + layerGroup.Layers().AddLayer(layerItem.ID(), -1) + } + + // add group to parent + if inp.Index != nil { + index = *inp.Index + } + + parentLayer.Layers().AddLayer(layerGroup.ID(), index) + + // save + var pl layer.Layer = parentLayer + var gl layer.Layer = layerGroup + layers := layer.List{&pl, &gl} + properties := []*property.Property{} + if p != nil { + properties = append(properties, p) + } + + for index, item := range layerItems { + var l layer.Layer = item + layers = append(layers, &l) + if p := layerItemProperties[index]; p != nil { + properties = append(properties, p) + } + } + + err = i.propertyRepo.SaveAll(ctx, properties) + if err != nil { + return nil, nil, err + } + + err = i.layerRepo.SaveAll(ctx, layers) + if err != nil { + return nil, nil, err + } + + tx.Commit() + return layerGroup, parentLayer, nil +} + +func (i *Layer) fetchAllChildren(ctx context.Context, l layer.Layer) ([]id.LayerID, []id.PropertyID, error) { + lidl := layer.ToLayerGroup(l).Layers().Layers() + layers, err := i.layerRepo.FindByIDs(ctx, lidl) + if err != nil { + return nil, nil, err + } + properties := append(make([]id.PropertyID, 0), l.Properties()...) + for _, ll := range layers { + lg := layer.ToLayerGroup(*ll) + li := layer.ToLayerItem(*ll) + if lg != nil { + childrenLayers, childrenProperties, err := i.fetchAllChildren(ctx, lg) + if err != nil { + return nil, nil, err + } + properties = append(properties, childrenProperties...) + lidl = append(lidl, childrenLayers...) + + } + if li != nil { + properties = append(properties, l.Properties()...) 
+ } + + } + return lidl, properties, nil +} + +func (i *Layer) Remove(ctx context.Context, lid id.LayerID, operator *usecase.Operator) (_ id.LayerID, _ *layer.Group, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + l, err := i.layerRepo.FindByID(ctx, lid) + if err != nil { + return lid, nil, err + } + if err := i.CanWriteScene(l.Scene(), operator); err != nil { + return lid, nil, err + } + + if err := i.CheckSceneLock(ctx, l.Scene()); err != nil { + return lid, nil, err + } + + if gl := layer.GroupFromLayer(l); gl != nil && gl.IsRoot() { + return lid, nil, errors.New("root layer cannot be deleted") + } + + parentLayer, err := i.layerRepo.FindParentByID(ctx, lid) + if err != nil && err != rerror.ErrNotFound { + return lid, nil, err + } + if parentLayer != nil { + if l.Scene() != parentLayer.Scene() { + return lid, nil, errors.New("invalid layer") + } + } + + if parentLayer != nil && parentLayer.IsLinked() { + return lid, nil, interfaces.ErrCannotRemoveLayerToLinkedLayerGroup + } + if parentLayer != nil { + parentLayer.Layers().RemoveLayer(lid) + err = i.layerRepo.Save(ctx, parentLayer) + if err != nil { + return lid, nil, err + } + } + layers, properties, err := i.fetchAllChildren(ctx, l) + if err != nil { + return lid, nil, err + } + layers = append(layers, l.ID()) + err = i.layerRepo.RemoveAll(ctx, layers) + if err != nil { + return lid, nil, err + } + err = i.propertyRepo.RemoveAll(ctx, properties) + if err != nil { + return lid, nil, err + } + + tx.Commit() + return lid, parentLayer, nil +} + +func (i *Layer) Update(ctx context.Context, inp interfaces.UpdateLayerInput, operator *usecase.Operator) (_ layer.Layer, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + layer, err := i.layerRepo.FindByID(ctx, 
inp.LayerID) + if err != nil { + return nil, err + } + if err := i.CanWriteScene(layer.Scene(), operator); err != nil { + return nil, err + } + + // check scene lock + if err := i.CheckSceneLock(ctx, layer.Scene()); err != nil { + return nil, err + } + + if inp.Name != nil { + layer.Rename(*inp.Name) + } + + if inp.Visible != nil { + layer.SetVisible(*inp.Visible) + } + + err = i.layerRepo.Save(ctx, layer) + if err != nil { + return nil, err + } + + tx.Commit() + return layer, nil +} + +func (i *Layer) Move(ctx context.Context, inp interfaces.MoveLayerInput, operator *usecase.Operator) (_ id.LayerID, _ *layer.Group, _ *layer.Group, _ int, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + parentLayer, err := i.layerRepo.FindParentByID(ctx, inp.LayerID) + if err != nil { + return inp.LayerID, nil, nil, -1, err + } + if err := i.CanWriteScene(parentLayer.Scene(), operator); err != nil { + return inp.LayerID, nil, nil, -1, err + } + + // check scene lock + if err := i.CheckSceneLock(ctx, parentLayer.Scene()); err != nil { + return inp.LayerID, nil, nil, -1, err + } + + var toParentLayer *layer.Group + if inp.DestLayerID == nil || parentLayer.ID() == *inp.DestLayerID { + toParentLayer = parentLayer + } else if parentLayer.IsLinked() { + return inp.LayerID, nil, nil, -1, interfaces.ErrLinkedLayerItemCannotBeMoved + } else { + toParentLayer, err = i.layerRepo.FindGroupByID(ctx, *inp.DestLayerID) + if err != nil { + return inp.LayerID, nil, nil, -1, err + } + if toParentLayer.Scene() != parentLayer.Scene() { + return inp.LayerID, nil, nil, -1, interfaces.ErrCannotMoveLayerToOtherScene + } + if toParentLayer.IsLinked() { + return inp.LayerID, nil, nil, -1, interfaces.ErrLayerCannotBeMovedToLinkedLayerGroup + } + } + + toParentLayer.MoveLayerFrom(inp.LayerID, inp.Index, parentLayer) + + layers := layer.List{parentLayer.LayerRef()} + if 
parentLayer.ID() != toParentLayer.ID() {
		layers = append(layers, toParentLayer.LayerRef())
	}
	if err := i.layerRepo.SaveAll(ctx, layers); err != nil {
		return inp.LayerID, nil, nil, -1, err
	}

	tx.Commit()
	return inp.LayerID,
		parentLayer,
		toParentLayer,
		toParentLayer.Layers().FindLayerIndex(inp.LayerID),
		nil
}

// CreateInfobox attaches a new, empty infobox (backed by the builtin infobox
// property schema) to the given layer.
func (i *Layer) CreateInfobox(ctx context.Context, lid id.LayerID, operator *usecase.Operator) (_ layer.Layer, err error) {
	tx, err := i.transaction.Begin()
	if err != nil {
		return
	}
	defer func() {
		if err2 := tx.End(ctx); err == nil && err2 != nil {
			err = err2
		}
	}()

	l, err := i.layerRepo.FindByID(ctx, lid)
	if err != nil {
		return nil, err
	}
	if err := i.CanWriteScene(l.Scene(), operator); err != nil {
		return nil, err
	}

	// check scene lock
	if err := i.CheckSceneLock(ctx, l.Scene()); err != nil {
		return nil, err
	}

	if l.Infobox() != nil {
		return nil, interfaces.ErrInfoboxAlreadyExists
	}

	schema := builtin.GetPropertySchema(builtin.PropertySchemaIDInfobox)
	property, err := property.New().NewID().Schema(schema.ID()).Scene(l.Scene()).Build()
	if err != nil {
		return nil, err
	}
	l.SetInfobox(layer.NewInfobox(nil, property.ID()))

	if err := i.propertyRepo.Save(ctx, property); err != nil {
		return nil, err
	}
	if err := i.layerRepo.Save(ctx, l); err != nil {
		return nil, err
	}

	tx.Commit()
	return l, nil
}

// RemoveInfobox detaches the infobox from the given layer and removes its
// property.
func (i *Layer) RemoveInfobox(ctx context.Context, layerID id.LayerID, operator *usecase.Operator) (_ layer.Layer, err error) {

	tx, err := i.transaction.Begin()
	if err != nil {
		return
	}
	defer func() {
		if err2 := tx.End(ctx); err == nil && err2 != nil {
			err = err2
		}
	}()

	layer, err := i.layerRepo.FindByID(ctx, layerID)
	if err != nil {
		return nil, err
	}
	if err := i.CanWriteScene(layer.Scene(), operator); err != nil {
		return nil, err
	}

	// check scene lock
	if err := i.CheckSceneLock(ctx,
layer.Scene()); err != nil {
		return nil, err
	}

	infobox := layer.Infobox()
	if infobox == nil {
		return nil, interfaces.ErrInfoboxNotFound
	}

	layer.SetInfobox(nil)

	if err := i.propertyRepo.Remove(ctx, infobox.Property()); err != nil {
		return nil, err
	}
	if err := i.layerRepo.Save(ctx, layer); err != nil {
		return nil, err
	}

	tx.Commit()
	return layer, nil
}

// AddInfoboxField appends a new block field (from the given plugin extension)
// to the layer's infobox at the requested index.
func (i *Layer) AddInfoboxField(ctx context.Context, inp interfaces.AddInfoboxFieldParam, operator *usecase.Operator) (_ *layer.InfoboxField, _ layer.Layer, err error) {
	tx, err := i.transaction.Begin()
	if err != nil {
		return
	}
	defer func() {
		if err2 := tx.End(ctx); err == nil && err2 != nil {
			err = err2
		}
	}()

	l, err := i.layerRepo.FindByID(ctx, inp.LayerID)
	if err != nil {
		return nil, nil, err
	}
	if err := i.CanWriteScene(l.Scene(), operator); err != nil {
		return nil, nil, err
	}

	// check scene lock
	if err := i.CheckSceneLock(ctx, l.Scene()); err != nil {
		return nil, nil, err
	}

	infobox := l.Infobox()
	if infobox == nil {
		return nil, nil, interfaces.ErrInfoboxNotFound
	}

	// only block-type extensions may be placed in an infobox
	_, extension, err := i.getPlugin(ctx, l.Scene(), &inp.PluginID, &inp.ExtensionID)
	if err != nil {
		return nil, nil, err
	}
	if extension.Type() != plugin.ExtensionTypeBlock {
		return nil, nil, interfaces.ErrExtensionTypeMustBeBlock
	}

	property, err := property.New().NewID().Schema(extension.Schema()).Scene(l.Scene()).Build()
	if err != nil {
		return nil, nil, err
	}

	field, err := layer.NewInfoboxField().
		NewID().
		Plugin(inp.PluginID).
		Extension(inp.ExtensionID).
		Property(property.ID()).
+ Build() + if err != nil { + return nil, nil, err + } + + index := -1 + if inp.Index != nil { + index = *inp.Index + } + infobox.Add(field, index) + + err = i.propertyRepo.Save(ctx, property) + if err != nil { + return nil, nil, err + } + + err = i.layerRepo.Save(ctx, l) + if err != nil { + return nil, nil, err + } + + tx.Commit() + return field, l, err +} + +func (i *Layer) MoveInfoboxField(ctx context.Context, inp interfaces.MoveInfoboxFieldParam, operator *usecase.Operator) (_ id.InfoboxFieldID, _ layer.Layer, _ int, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + layer, err := i.layerRepo.FindByID(ctx, inp.LayerID) + if err != nil { + return inp.InfoboxFieldID, nil, -1, err + } + if err := i.CanWriteScene(layer.Scene(), operator); err != nil { + return inp.InfoboxFieldID, nil, -1, err + } + + // check scene lock + if err := i.CheckSceneLock(ctx, layer.Scene()); err != nil { + return inp.InfoboxFieldID, nil, -1, err + } + + infobox := layer.Infobox() + if infobox == nil { + return inp.InfoboxFieldID, nil, -1, interfaces.ErrInfoboxNotFound + } + + infobox.Move(inp.InfoboxFieldID, inp.Index) + + err = i.layerRepo.Save(ctx, layer) + if err != nil { + return inp.InfoboxFieldID, nil, -1, err + } + + tx.Commit() + return inp.InfoboxFieldID, layer, inp.Index, err +} + +func (i *Layer) RemoveInfoboxField(ctx context.Context, inp interfaces.RemoveInfoboxFieldParam, operator *usecase.Operator) (_ id.InfoboxFieldID, _ layer.Layer, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + layer, err := i.layerRepo.FindByID(ctx, inp.LayerID) + if err != nil { + return inp.InfoboxFieldID, nil, err + } + if err := i.CanWriteScene(layer.Scene(), operator); err != nil { + return inp.InfoboxFieldID, nil, err + } + + // check scene lock 
+ if err := i.CheckSceneLock(ctx, layer.Scene()); err != nil { + return inp.InfoboxFieldID, nil, err + } + + infobox := layer.Infobox() + if infobox == nil { + return inp.InfoboxFieldID, nil, interfaces.ErrInfoboxNotFound + } + + infobox.Remove(inp.InfoboxFieldID) + + err = i.layerRepo.Save(ctx, layer) + if err != nil { + return inp.InfoboxFieldID, nil, err + } + + tx.Commit() + return inp.InfoboxFieldID, layer, err +} + +func (i *Layer) getPlugin(ctx context.Context, sid id.SceneID, p *id.PluginID, e *id.PluginExtensionID) (*plugin.Plugin, *plugin.Extension, error) { + if p == nil { + return nil, nil, nil + } + + plugin, err := i.pluginRepo.FindByID(ctx, *p) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return nil, nil, interfaces.ErrPluginNotFound + } + return nil, nil, err + } + + if e == nil { + return plugin, nil, nil + } + + extension := plugin.Extension(*e) + if extension == nil { + return nil, nil, interfaces.ErrExtensionNotFound + } + + return plugin, extension, nil +} + +func (i *Layer) ImportLayer(ctx context.Context, inp interfaces.ImportLayerParam, operator *usecase.Operator) (_ layer.List, _ *layer.Group, err error) { + if inp.File == nil { + return nil, nil, interfaces.ErrFileNotIncluded + } + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + parent, err := i.layerRepo.FindGroupByID(ctx, inp.LayerID) + if err != nil { + return nil, nil, err + } + if err := i.CanWriteScene(parent.Scene(), operator); err != nil { + return nil, nil, err + } + + var decoder decoding.Decoder + switch inp.Format { + case decoding.LayerEncodingFormatKML: + d := xml.NewDecoder(inp.File.Content) + decoder = decoding.NewKMLDecoder(d, parent.Scene()) + case decoding.LayerEncodingFormatGEOJSON: + decoder = decoding.NewGeoJSONDecoder(inp.File.Content, parent.Scene()) + case decoding.LayerEncodingFormatCZML: + d := json.NewDecoder(inp.File.Content) + 
decoder = decoding.NewCZMLDecoder(d, parent.Scene()) + case decoding.LayerEncodingFormatREEARTH: + d := json.NewDecoder(inp.File.Content) + decoder = decoding.NewReearthDecoder(d, parent.Scene()) + case decoding.LayerEncodingFormatSHAPE: + // limit file size to 2m + if inp.File.Size > 2097152 { + return nil, nil, errors.New("file is too big") + } + var reader decoding.ShapeReader + if inp.File.ContentType == "application/octet-stream" && strings.HasSuffix(inp.File.Path, ".shp") { + reader, err = shp.ReadFrom(inp.File.Content) + if err != nil { + return nil, nil, err + } + decoder = decoding.NewShapeDecoder(reader, parent.Scene()) + } else if inp.File.ContentType == "application/zip" && strings.HasSuffix(inp.File.Path, ".zip") { + reader, err = shp.ReadZipFrom(inp.File.Content) + if err != nil { + return nil, nil, err + } + } + decoder = decoding.NewShapeDecoder(reader, parent.Scene()) + } + if decoder == nil { + return nil, nil, errors.New("unsupported format") + } + result, err := decoder.Decode() + if err != nil { + return nil, nil, err + } + + properties := result.Properties.List() + if err := (property.Validator{ + SchemaLoader: repo.PropertySchemaLoaderFrom(i.propertySchemaRepo), + }.Validate(ctx, properties)); err != nil { + return nil, nil, err + } + + rootLayers := result.RootLayers() + if len(rootLayers) == 0 { + return nil, nil, errors.New("no layers are imported") + } + + if result.Root.LayerCount() > 0 { + parent.Layers().AppendLayers(result.Root.Layers()...) 
+ } + + if err := i.layerRepo.SaveAll(ctx, append(result.Layers.List(), parent.LayerRef())); err != nil { + return nil, nil, err + } + + if err := i.propertyRepo.SaveAll(ctx, properties); err != nil { + return nil, nil, err + } + + tx.Commit() + return rootLayers, parent, nil +} + +func (i *Layer) AttachTag(ctx context.Context, layerID id.LayerID, tagID id.TagID, operator *usecase.Operator) (layer.Layer, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + // ensure the tag exists + t, err := i.tagRepo.FindByID(ctx, tagID) + if err != nil { + return nil, err + } + if err := i.CanWriteScene(t.Scene(), operator); err != nil { + return nil, err + } + + l, err := i.layerRepo.FindByID(ctx, layerID) + if err != nil { + return nil, err + } + + updated := false + if tg := tag.ToTagGroup(t); tg != nil { + updated = l.Tags().Add(layer.NewTagGroup(tagID, nil)) + } else if ti := tag.ToTagItem(t); ti != nil { + if p := ti.Parent(); p != nil { + updated = l.Tags().FindGroup(*ti.Parent()).Add(layer.NewTagItem(ti.ID())) + } else { + updated = l.Tags().Add(layer.NewTagItem(ti.ID())) + } + } + + if updated { + if err := i.layerRepo.Save(ctx, l); err != nil { + return nil, err + } + } + + tx.Commit() + return l, nil +} + +func (i *Layer) DetachTag(ctx context.Context, layerID id.LayerID, tagID id.TagID, operator *usecase.Operator) (layer.Layer, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + layer, err := i.layerRepo.FindByID(ctx, layerID) + if err != nil { + return nil, err + } + if err := i.CanWriteScene(layer.Scene(), operator); err != nil { + return nil, err + } + + if layer.Tags().Delete(tagID) { + if err := i.layerRepo.Save(ctx, layer); err != nil { + return nil, err + } + } + + tx.Commit() + return layer, nil +} diff 
--git a/server/internal/usecase/interactor/layer_test.go b/server/internal/usecase/interactor/layer_test.go new file mode 100644 index 000000000..1dcef48cc --- /dev/null +++ b/server/internal/usecase/interactor/layer_test.go @@ -0,0 +1,37 @@ +package interactor + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/internal/infrastructure/memory" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/stretchr/testify/assert" +) + +func TestCreateInfobox(t *testing.T) { + ctx := context.Background() + + db := memory.New() + scene, _ := scene.New().NewID().Team(id.NewTeamID()).Project(id.NewProjectID()).RootLayer(id.NewLayerID()).Build() + _ = db.Scene.Save(ctx, scene) + il := NewLayer(db) + + l, _ := layer.NewItem().NewID().Scene(scene.ID()).Build() + _ = db.Layer.Save(ctx, l) + + i, _ := il.CreateInfobox(ctx, l.ID(), &usecase.Operator{ + WritableScenes: []id.SceneID{scene.ID()}, + }) + assert.NotNil(t, i) + l, err := db.Layer.FindItemByID(ctx, l.ID()) + assert.NoError(t, err) + infobox := l.Infobox() + assert.NotNil(t, infobox) + property, _ := db.Property.FindByID(ctx, infobox.Property()) + assert.NotNil(t, property) + assert.NotNil(t, property.Schema()) +} diff --git a/server/internal/usecase/interactor/plugin.go b/server/internal/usecase/interactor/plugin.go new file mode 100644 index 000000000..d3c02bba3 --- /dev/null +++ b/server/internal/usecase/interactor/plugin.go @@ -0,0 +1,50 @@ +package interactor + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" +) + +type Plugin 
struct { + common + sceneRepo repo.Scene + pluginRepo repo.Plugin + propertySchemaRepo repo.PropertySchema + propertyRepo repo.Property + layerRepo repo.Layer + file gateway.File + transaction repo.Transaction + pluginRegistry gateway.PluginRegistry +} + +func NewPlugin(r *repo.Container, gr *gateway.Container) interfaces.Plugin { + return &Plugin{ + sceneRepo: r.Scene, + layerRepo: r.Layer, + pluginRepo: r.Plugin, + propertySchemaRepo: r.PropertySchema, + propertyRepo: r.Property, + transaction: r.Transaction, + file: gr.File, + pluginRegistry: gr.PluginRegistry, + } +} + +func (i *Plugin) pluginCommon() *pluginCommon { + return &pluginCommon{ + pluginRepo: i.pluginRepo, + propertySchemaRepo: i.propertySchemaRepo, + file: i.file, + pluginRegistry: i.pluginRegistry, + } +} + +func (i *Plugin) Fetch(ctx context.Context, ids []id.PluginID, operator *usecase.Operator) ([]*plugin.Plugin, error) { + return i.pluginRepo.FindByIDs(ctx, ids) +} diff --git a/server/internal/usecase/interactor/plugin_common.go b/server/internal/usecase/interactor/plugin_common.go new file mode 100644 index 000000000..8613222e6 --- /dev/null +++ b/server/internal/usecase/interactor/plugin_common.go @@ -0,0 +1,76 @@ +package interactor + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/plugin/pluginpack" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type pluginCommon struct { + pluginRepo repo.Plugin + propertySchemaRepo repo.PropertySchema + file gateway.File + pluginRegistry gateway.PluginRegistry +} + +func (i *pluginCommon) SavePluginPack(ctx context.Context, p *pluginpack.Package) error { + for { + f, err := p.Files.Next() + if err != nil { + return 
interfaces.ErrInvalidPluginPackage + } + if f == nil { + break + } + if err := i.file.UploadPluginFile(ctx, p.Manifest.Plugin.ID(), f); err != nil { + return rerror.ErrInternalBy(err) + } + } + + // save plugin and property schemas + if ps := p.Manifest.PropertySchemas(); len(ps) > 0 { + if err := i.propertySchemaRepo.SaveAll(ctx, ps); err != nil { + return err + } + } + + if err := i.pluginRepo.Save(ctx, p.Manifest.Plugin); err != nil { + return err + } + + return nil +} + +func (i *pluginCommon) GetOrDownloadPlugin(ctx context.Context, pid id.PluginID) (*plugin.Plugin, error) { + if pid.IsNil() || pid.Equal(id.OfficialPluginID) { + return nil, rerror.ErrNotFound + } + + if plugin, err := i.pluginRepo.FindByID(ctx, pid); err != nil && !errors.Is(err, rerror.ErrNotFound) { + return nil, err + } else if plugin != nil { + return plugin, nil + } + + if !pid.Scene().IsNil() || i.pluginRegistry == nil { + return nil, rerror.ErrNotFound + } + + pack, err := i.pluginRegistry.FetchPluginPackage(ctx, pid) + if err != nil { + return nil, err + } + + if err := i.SavePluginPack(ctx, pack); err != nil { + return nil, err + } + + return pack.Manifest.Plugin, nil +} diff --git a/server/internal/usecase/interactor/plugin_upload.go b/server/internal/usecase/interactor/plugin_upload.go new file mode 100644 index 000000000..50f947137 --- /dev/null +++ b/server/internal/usecase/interactor/plugin_upload.go @@ -0,0 +1,342 @@ +package interactor + +import ( + "context" + "io" + "net/http" + "net/url" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/plugin/manifest" + "github.com/reearth/reearth-backend/pkg/plugin/pluginpack" + "github.com/reearth/reearth-backend/pkg/plugin/repourl" + "github.com/reearth/reearth-backend/pkg/property" + 
"github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/scene" +) + +var pluginPackageSizeLimit int64 = 10 * 1024 * 1024 // 10MB + +func (i *Plugin) Upload(ctx context.Context, r io.Reader, sid id.SceneID, operator *usecase.Operator) (_ *plugin.Plugin, _ *scene.Scene, err error) { + if err := i.CanWriteScene(sid, operator); err != nil { + return nil, nil, err + } + + p, err := pluginpack.PackageFromZip(r, &sid, pluginPackageSizeLimit) + if err != nil { + return nil, nil, &rerror.Error{ + Label: interfaces.ErrInvalidPluginPackage, + Err: err, + Separate: true, + } + } + + return i.upload(ctx, p, sid, operator) +} + +func (i *Plugin) UploadFromRemote(ctx context.Context, u *url.URL, sid id.SceneID, operator *usecase.Operator) (_ *plugin.Plugin, _ *scene.Scene, err error) { + if err := i.CanWriteScene(sid, operator); err != nil { + return nil, nil, err + } + + ru, err := repourl.New(u) + if err != nil { + return nil, nil, err + } + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, ru.ArchiveURL().String(), nil) + if err != nil { + return nil, nil, interfaces.ErrInvalidPluginPackage + } + + res, err := http.DefaultClient.Do(req) + if err != nil { + return nil, nil, interfaces.ErrInvalidPluginPackage + } + defer func() { + _ = res.Body.Close() + }() + if res.StatusCode != http.StatusOK { + return nil, nil, interfaces.ErrInvalidPluginPackage + } + + p, err := pluginpack.PackageFromZip(res.Body, &sid, pluginPackageSizeLimit) + if err != nil { + _ = res.Body.Close() + return nil, nil, interfaces.ErrInvalidPluginPackage + } + + return i.upload(ctx, p, sid, operator) +} + +func (i *Plugin) upload(ctx context.Context, p *pluginpack.Package, sid id.SceneID, operator *usecase.Operator) (_ *plugin.Plugin, _ *scene.Scene, err error) { + if err := i.CanWriteScene(sid, operator); err != nil { + return nil, nil, err + } + + s, err := i.sceneRepo.FindByID(ctx, sid) + if err != nil { + return nil, nil, err + } + + tx, err := 
i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + var oldPManifest *manifest.Manifest + newpid := p.Manifest.Plugin.ID() + oldpid := s.Plugins().PluginByName(newpid.Name()).PluginRef() + if oldpid != nil { + oldPlugin, err := i.pluginRepo.FindByID(ctx, *oldpid) + if err != nil { + return nil, nil, err + } + + oldPManifest2, err := i.pluginManifestFromPlugin(ctx, oldPlugin) + if err != nil { + return nil, nil, err + } + oldPManifest = &oldPManifest2 + } + + // new (oldpid == nil): upload files, save plugin and properties -> install + // same (oldpid.Equal(newpid)): delete old files -> upload files, save plugin and property schemas -> migrate + // diff (!oldpid.Equal(newpid)): upload files, save plugin and property schemas -> migrate -> delete old files + + if oldpid != nil && oldpid.Equal(newpid) { + // same only: delete old files + if err := i.file.RemovePlugin(ctx, *oldpid); err != nil { + return nil, nil, err + } + } + + if err := i.pluginCommon().SavePluginPack(ctx, p); err != nil { + return nil, nil, err + } + + if err := i.pluginRepo.Save(ctx, p.Manifest.Plugin); err != nil { + return nil, nil, err + } + + if oldPManifest == nil { + // new: install plugin + if err := i.installScenePlugin(ctx, p, s); err != nil { + return nil, nil, err + } + } else { + // same, diff: migrate + if err := i.migrateScenePlugin(ctx, *oldPManifest, p, s); err != nil { + return nil, nil, err + } + } + + if oldpid != nil && oldPManifest != nil && !oldpid.Equal(newpid) { + // diff only: delete old files + if err := i.file.RemovePlugin(ctx, *oldpid); err != nil { + return nil, nil, err + } + + if oldpid.Scene() != nil { + // remove old scene plugin + if err := i.pluginRepo.Remove(ctx, *oldpid); err != nil { + return nil, nil, err + } + if ps := oldPManifest.Plugin.PropertySchemas(); len(ps) > 0 { + if err := i.propertySchemaRepo.RemoveAll(ctx, ps); err != nil { + return nil, nil, err + } 
+ } + } + } + + tx.Commit() + return p.Manifest.Plugin, s, nil +} + +// installScenePlugin installs the plugin to the scene +func (i *Plugin) installScenePlugin(ctx context.Context, p *pluginpack.Package, s *scene.Scene) (err error) { + var ppid *id.PropertyID + var pp *property.Property + if psid := p.Manifest.Plugin.Schema(); psid != nil { + pp, err = property.New().NewID().Schema(*psid).Build() + if err != nil { + return err + } + } + + s.Plugins().Add(scene.NewPlugin(p.Manifest.Plugin.ID(), ppid)) + + if pp != nil { + if err := i.propertyRepo.Save(ctx, pp); err != nil { + return err + } + } + if err := i.sceneRepo.Save(ctx, s); err != nil { + return err + } + return nil +} + +func (i *Plugin) migrateScenePlugin(ctx context.Context, oldm manifest.Manifest, p *pluginpack.Package, s *scene.Scene) (err error) { + if oldm.Plugin == nil || p.Manifest == nil { + return nil + } + + diff := manifest.DiffFrom(oldm, *p.Manifest) + updatedProperties := property.List{} + + // update scene + var spp *id.PropertyID + if to := diff.PropertySchemaDiff.To; !to.IsNil() && diff.PropertySchemaDiff.From.IsNil() { + // new plugin property + p, err := property.New().NewID().Scene(s.ID()).Schema(to).Build() + if err != nil { + return err + } + spp = p.ID().Ref() + updatedProperties = append(updatedProperties, p) + } + + if sp := s.Plugins().Plugin(diff.From); sp != nil && sp.Property() != nil && diff.PropertySchemaDeleted { + // plugin property should be removed + if err := i.propertyRepo.Remove(ctx, *sp.Property()); err != nil { + return err + } + } + + s.Widgets().UpgradePlugin(diff.From, diff.To) + s.Plugins().Upgrade(diff.From, diff.To, spp, diff.PropertySchemaDeleted) + + // delete layers, blocks and widgets + for _, e := range diff.DeletedExtensions { + deletedProperties, err := i.deleteLayersByPluginExtension(ctx, diff.From, &e.ExtensionID) + if err != nil { + return err + } + + if deletedProperties2, err := i.deleteBlocksByPluginExtension(ctx, diff.From, &e.ExtensionID); err != 
nil { + return err + } else { + deletedProperties = append(deletedProperties, deletedProperties2...) + } + + deletedProperties = append(deletedProperties, s.Widgets().RemoveAllByPlugin(diff.From, e.ExtensionID.Ref())...) + + if len(deletedProperties) > 0 { + if err := i.propertyRepo.RemoveAll(ctx, deletedProperties); err != nil { + return err + } + } + } + + if err := i.sceneRepo.Save(ctx, s); err != nil { + return err + } + + // migrate layers + if err := i.layerRepo.UpdatePlugin(ctx, diff.From, diff.To); err != nil { + return err + } + + // migrate properties + updatedPropertySchemas := diff.PropertySchmaDiffs() + updatedPropertySchemaIDs := updatedPropertySchemas.FromSchemas() + pl, err := i.propertyRepo.FindBySchema(ctx, updatedPropertySchemaIDs, s.ID()) + if err != nil { + return err + } + for _, p := range pl { + if e := updatedPropertySchemas.FindByFrom(p.Schema()); e != nil && e.Migrate(p) { + updatedProperties = append(updatedProperties, p) + } + } + if len(updatedProperties) > 0 { + if err := i.propertyRepo.SaveAll(ctx, updatedProperties); err != nil { + return err + } + } + + if err := i.propertyRepo.UpdateSchemaPlugin(ctx, diff.From, diff.To, s.ID()); err != nil { + return err + } + + // delete unused schemas and properties + if deleted := diff.DeletedPropertySchemas(); len(deleted) > 0 { + if err := i.propertySchemaRepo.RemoveAll(ctx, deleted); err != nil { + return err + } + } + + return nil +} + +func (i *Plugin) deleteLayersByPluginExtension(ctx context.Context, p id.PluginID, e *id.PluginExtensionID) ([]id.PropertyID, error) { + // delete layers + deletedLayers := []id.LayerID{} + layers, err := i.layerRepo.FindByPluginAndExtension(ctx, p, e) + if err != nil { + return nil, err + } + deletedLayers = append(deletedLayers, layers.IDs().Layers()...) + + parentLayers, err := i.layerRepo.FindParentsByIDs(ctx, deletedLayers) + if err != nil { + return nil, err + } + + for _, p := range parentLayers { + p.Layers().RemoveLayer(deletedLayers...) 
+ } + if err := i.layerRepo.SaveAll(ctx, parentLayers.ToLayerList()); err != nil { + return nil, err + } + if err := i.layerRepo.RemoveAll(ctx, deletedLayers); err != nil { + return nil, err + } + + return layers.Properties(), nil +} + +func (i *Plugin) deleteBlocksByPluginExtension(ctx context.Context, p id.PluginID, e *id.PluginExtensionID) ([]id.PropertyID, error) { + layers, err := i.layerRepo.FindByPluginAndExtensionOfBlocks(ctx, p, e) + if err != nil { + return nil, err + } + + var deletedProperties []id.PropertyID + for _, l := range layers.Deref() { + deletedProperties = append(deletedProperties, l.Infobox().RemoveAllByPlugin(p, e)...) + } + + if err := i.layerRepo.SaveAll(ctx, layers); err != nil { + return nil, err + } + return deletedProperties, nil +} + +func (i *Plugin) pluginManifestFromPlugin(ctx context.Context, p *plugin.Plugin) (manifest.Manifest, error) { + schemas, err := i.propertySchemaRepo.FindByIDs(ctx, p.PropertySchemas()) + if err != nil { + return manifest.Manifest{}, err + } + + var s *property.Schema + if ps := p.Schema(); ps != nil { + s = schemas.Find(*ps) + } + + return manifest.Manifest{ + Plugin: p, + ExtensionSchema: schemas, + Schema: s, + }, nil +} diff --git a/server/internal/usecase/interactor/plugin_upload_test.go b/server/internal/usecase/interactor/plugin_upload_test.go new file mode 100644 index 000000000..1f07795e4 --- /dev/null +++ b/server/internal/usecase/interactor/plugin_upload_test.go @@ -0,0 +1,387 @@ +package interactor + +import ( + "archive/zip" + "bytes" + "context" + "io" + "os" + "testing" + + "github.com/reearth/reearth-backend/internal/infrastructure/fs" + "github.com/reearth/reearth-backend/internal/infrastructure/memory" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" + 
"github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" +) + +const mockPluginManifest = `{ + "id": "testplugin", + "version": "1.0.1", + "name": "testplugin", + "extensions": [ + { + "id": "block", + "type": "block", + "schema": { + "groups": [ + { + "id": "default", + "fields": [ + { + "id": "field", + "type": "string" + } + ] + } + ] + } + }, + { + "id": "widget", + "type": "widget", + "schema": {} + } + ] +}` + +var mockPluginID = id.MustPluginID("testplugin~1.0.1") +var mockPluginFiles = map[string]string{ + "reearth.yml": mockPluginManifest, + "block.js": "// barfoo", +} +var mockPluginArchiveZip bytes.Buffer + +func init() { + zbuf := bytes.Buffer{} + zw := zip.NewWriter(&zbuf) + for p, f := range mockPluginFiles { + w, _ := zw.Create(p) + _, _ = w.Write([]byte(f)) + } + _ = zw.Close() + mockPluginArchiveZip = zbuf +} + +func mockFS(files map[string]string) afero.Fs { + mfs := afero.NewMemMapFs() + for n, c := range files { + f, err := mfs.Create(n) + if err != nil { + panic(err) + } + _, _ = f.Write([]byte(c)) + _ = f.Close() + } + return mfs +} + +func TestPlugin_Upload_New(t *testing.T) { + // upload a new plugin + ctx := context.Background() + team := id.NewTeamID() + sid := id.NewSceneID() + pid := mockPluginID.WithScene(sid.Ref()) + + repos := memory.New() + mfs := mockFS(nil) + files, err := fs.NewFile(mfs, "") + assert.NoError(t, err) + scene := scene.New().ID(sid).Team(team).RootLayer(id.NewLayerID()).MustBuild() + _ = repos.Scene.Save(ctx, scene) + + uc := &Plugin{ + sceneRepo: repos.Scene, + pluginRepo: repos.Plugin, + propertySchemaRepo: repos.PropertySchema, + propertyRepo: repos.Property, + layerRepo: repos.Layer, + file: files, + transaction: repos.Transaction, + } + op := &usecase.Operator{ + WritableTeams: []id.TeamID{team}, + WritableScenes: []id.SceneID{sid}, + } + + reader := bytes.NewReader(mockPluginArchiveZip.Bytes()) + pl, s, err 
:= uc.Upload(ctx, reader, scene.ID(), op) + assert.NoError(t, err) + assert.Equal(t, scene.ID(), s.ID()) + assert.Equal(t, pid, pl.ID()) + + // scene + nscene, err := repos.Scene.FindByID(ctx, scene.ID()) + assert.NoError(t, err) + assert.True(t, nscene.Plugins().HasPlugin(pl.ID())) + + // plugin + npl, err := repos.Plugin.FindByID(ctx, pid) + assert.NoError(t, err) + assert.Equal(t, pid, npl.ID()) + + npf, err := mfs.Open("plugins/" + pid.String() + "/block.js") + assert.NoError(t, err) + npfc, _ := io.ReadAll(npf) + assert.Equal(t, "// barfoo", string(npfc)) +} + +// The plugin and its files should be replaced with the new one (old files are deleted) +// Properties that schema is changed should be migrated +// Layers, widgets, blocks, properties, and property schemas that extension is deleted should deleted +func TestPlugin_Upload_SameVersion(t *testing.T) { + // upgrade plugin to the same version + // 1 extension is deleted -> property schema, layers, and properties of the extension should be deleted + // old plugin files should be deleted + + ctx := context.Background() + team := id.NewTeamID() + sid := id.NewSceneID() + pid := mockPluginID.WithScene(sid.Ref()) + eid1 := id.PluginExtensionID("marker") + eid2 := id.PluginExtensionID("widget2") + wid1 := id.NewWidgetID() + + repos := memory.New() + mfs := mockFS(map[string]string{ + "plugins/" + pid.String() + "/hogehoge": "foobar", + }) + files, err := fs.NewFile(mfs, "") + assert.NoError(t, err) + + ps := property.NewSchema().ID(property.NewSchemaID(pid, eid1.String())).MustBuild() + ps2 := property.NewSchema().ID(property.NewSchemaID(pid, eid2.String())).MustBuild() + pl := plugin.New().ID(pid).Extensions([]*plugin.Extension{ + plugin.NewExtension().ID(eid1).Type(plugin.ExtensionTypePrimitive).Schema(ps.ID()).MustBuild(), + plugin.NewExtension().ID(eid2).Type(plugin.ExtensionTypeWidget).Schema(ps2.ID()).MustBuild(), + }).MustBuild() + + p1 := property.New().NewID().Schema(ps.ID()).Scene(sid).MustBuild() + p2 
:= property.New().NewID().Schema(ps2.ID()).Scene(sid).MustBuild() + pluginLayer := layer.NewItem().NewID().Scene(sid).Plugin(pid.Ref()).Extension(eid1.Ref()).Property(p1.IDRef()).MustBuild() + rootLayer := layer.NewGroup().NewID().Scene(sid).Layers(layer.NewIDList([]layer.ID{pluginLayer.ID()})).Root(true).MustBuild() + scene := scene.New().ID(sid).Team(team).RootLayer(rootLayer.ID()).Plugins(scene.NewPlugins([]*scene.Plugin{ + scene.NewPlugin(pid, nil), + })).Widgets(scene.NewWidgets([]*scene.Widget{ + scene.MustWidget(wid1, pid, eid2, p2.ID(), false, false), + }, nil)).MustBuild() + + _ = repos.PropertySchema.Save(ctx, ps) + _ = repos.Plugin.Save(ctx, pl) + _ = repos.Property.Save(ctx, p1) + _ = repos.Layer.SaveAll(ctx, layer.List{pluginLayer.LayerRef(), rootLayer.LayerRef()}) + _ = repos.Scene.Save(ctx, scene) + + uc := &Plugin{ + sceneRepo: repos.Scene, + pluginRepo: repos.Plugin, + propertySchemaRepo: repos.PropertySchema, + propertyRepo: repos.Property, + layerRepo: repos.Layer, + file: files, + transaction: repos.Transaction, + } + op := &usecase.Operator{ + WritableTeams: []id.TeamID{team}, + WritableScenes: []id.SceneID{sid}, + } + + reader := bytes.NewReader(mockPluginArchiveZip.Bytes()) + pl, s, err := uc.Upload(ctx, reader, scene.ID(), op) + + assert.NoError(t, err) + assert.Equal(t, scene.ID(), s.ID()) + assert.Equal(t, pid, pl.ID()) + + // scene + nscene, err := repos.Scene.FindByID(ctx, scene.ID()) + assert.NoError(t, err) + assert.True(t, nscene.Plugins().HasPlugin(pl.ID())) + assert.Nil(t, nscene.Widgets().Widget(wid1)) + + nlp2, err := repos.Property.FindByID(ctx, p1.ID()) + assert.Nil(t, nlp2) // deleted + assert.Equal(t, rerror.ErrNotFound, err) + + // plugin + npl, err := repos.Plugin.FindByID(ctx, pid) + assert.NoError(t, err) + assert.Equal(t, pid, npl.ID()) + + nlps, err := repos.PropertySchema.FindByID(ctx, ps.ID()) + assert.Nil(t, nlps) // deleted + assert.Equal(t, rerror.ErrNotFound, err) + + nlps2, err := 
repos.PropertySchema.FindByID(ctx, ps2.ID()) + assert.Nil(t, nlps2) // deleted + assert.Equal(t, rerror.ErrNotFound, err) + + _, err = mfs.Open("plugins/" + pid.String() + "/hogehoge") + assert.True(t, os.IsNotExist(err)) // deleted + + npf, err := mfs.Open("plugins/" + pid.String() + "/block.js") + assert.NoError(t, err) + npfc, _ := io.ReadAll(npf) + assert.Equal(t, "// barfoo", string(npfc)) + + // layer + nlp, err := repos.Property.FindByID(ctx, p1.ID()) + assert.Nil(t, nlp) // deleted + assert.Equal(t, rerror.ErrNotFound, err) + + nl, err := repos.Layer.FindByID(ctx, pluginLayer.ID()) + assert.Nil(t, nl) // deleted + assert.Equal(t, rerror.ErrNotFound, err) + + nrl, err := repos.Layer.FindGroupByID(ctx, rootLayer.ID()) + assert.NoError(t, err) + assert.Equal(t, []layer.ID{}, nrl.Layers().Layers()) // deleted +} + +// The plugin and its files should be newrly created (old plugin and files are deleted if the plugin is private) +// Properties that schema is changed should be migrated +// Layers, widgets, blocks, properties, and property schemas that extension is deleted should deleted +// Plugin field of layers, widgets, block, properties, and property schemas are replaced with the new plugin ID +func TestPlugin_Upload_DiffVersion(t *testing.T) { + // upgrade plugin to the different version + // plugin ID of property and layers should be updated + + ctx := context.Background() + team := id.NewTeamID() + sid := id.NewSceneID() + oldpid := id.MustPluginID("testplugin~1.0.0").WithScene(sid.Ref()) + pid := mockPluginID.WithScene(sid.Ref()) + eid1 := id.PluginExtensionID("block") + eid2 := id.PluginExtensionID("widget") + nlpsid1 := id.NewPropertySchemaID(pid, eid1.String()) + nlpsid2 := id.NewPropertySchemaID(pid, eid2.String()) + wid := id.NewWidgetID() + + repos := memory.New() + mfs := mockFS(map[string]string{ + "plugins/" + oldpid.String() + "/hogehoge": "foobar", + }) + files, err := fs.NewFile(mfs, "") + assert.NoError(t, err) + + oldpsf := 
property.NewSchemaField().ID("field").Type(property.ValueTypeNumber).MustBuild() + oldpsg := property.NewSchemaGroup().ID("default").Fields([]*property.SchemaField{oldpsf}).MustBuild() + oldps := property.NewSchema().ID(property.NewSchemaID(oldpid, eid1.String())).Groups(property.NewSchemaGroupList( + []*property.SchemaGroup{oldpsg}, + )).MustBuild() + oldps2 := property.NewSchema().ID(property.NewSchemaID(oldpid, eid2.String())).MustBuild() + oldpl := plugin.New().ID(oldpid).Extensions([]*plugin.Extension{ + plugin.NewExtension().ID(eid1).Type(plugin.ExtensionTypeBlock).Schema(oldps.ID()).MustBuild(), + plugin.NewExtension().ID(eid2).Type(plugin.ExtensionTypeWidget).Schema(oldps2.ID()).MustBuild(), + }).MustBuild() + + pf := property.NewField("field").Value(property.ValueTypeNumber.ValueFrom(100).Some()).MustBuild() + pg := property.NewGroup().NewID().SchemaGroup(oldpsg.ID()).Fields([]*property.Field{pf}).MustBuild() + oldp := property.New().NewID().Schema(oldps.ID()).Scene(sid).Items([]property.Item{pg}).MustBuild() + oldp2 := property.New().NewID().Schema(oldps.ID()).Scene(sid).MustBuild() + oldp3 := property.New().NewID().Schema(oldps.ID()).Scene(sid).MustBuild() + oldp4 := property.New().NewID().Schema(oldps2.ID()).Scene(sid).MustBuild() + ib := layer.NewInfobox([]*layer.InfoboxField{ + layer.NewInfoboxField().NewID().Plugin(oldp3.Schema().Plugin()).Extension(plugin.ExtensionID(oldp3.Schema().ID())).Property(oldp3.ID()).MustBuild(), + }, oldp2.ID()) + pluginLayer := layer.NewItem().NewID().Scene(sid).Plugin(oldpid.Ref()).Extension(eid1.Ref()).Property(oldp.IDRef()).Infobox(ib).MustBuild() + rootLayer := layer.NewGroup().NewID().Scene(sid).Layers(layer.NewIDList([]layer.ID{pluginLayer.ID()})).Root(true).MustBuild() + scene := scene.New().ID(sid).Team(team).RootLayer(rootLayer.ID()).Plugins(scene.NewPlugins([]*scene.Plugin{ + scene.NewPlugin(oldpid, nil), + })).Widgets(scene.NewWidgets([]*scene.Widget{ + scene.MustWidget(wid, oldpid, eid2, oldp4.ID(), true, 
false), + }, nil)).MustBuild() + + _ = repos.PropertySchema.SaveAll(ctx, property.SchemaList{oldps, oldps2}) + _ = repos.Plugin.Save(ctx, oldpl) + _ = repos.Property.SaveAll(ctx, property.List{oldp, oldp2, oldp3, oldp4}) + _ = repos.Layer.SaveAll(ctx, layer.List{pluginLayer.LayerRef(), rootLayer.LayerRef()}) + _ = repos.Scene.Save(ctx, scene) + + uc := &Plugin{ + sceneRepo: repos.Scene, + pluginRepo: repos.Plugin, + propertySchemaRepo: repos.PropertySchema, + propertyRepo: repos.Property, + layerRepo: repos.Layer, + file: files, + transaction: repos.Transaction, + } + op := &usecase.Operator{ + WritableTeams: []id.TeamID{team}, + WritableScenes: []id.SceneID{sid}, + } + + reader := bytes.NewReader(mockPluginArchiveZip.Bytes()) + oldpl, s2, err := uc.Upload(ctx, reader, sid, op) + + assert.NoError(t, err) + assert.Equal(t, scene.ID(), s2.ID()) + assert.Equal(t, pid, oldpl.ID()) + + // scene + nscene, err := repos.Scene.FindByID(ctx, scene.ID()) + assert.NoError(t, err) + assert.False(t, nscene.Plugins().HasPlugin(oldpid)) + assert.True(t, nscene.Plugins().HasPlugin(pid)) + assert.Equal(t, pid, nscene.Widgets().Widget(wid).Plugin()) + assert.Equal(t, eid2, nscene.Widgets().Widget(wid).Extension()) + + // plugin + opl, err := repos.Plugin.FindByID(ctx, oldpid) + assert.Nil(t, opl) // deleted + assert.Equal(t, rerror.ErrNotFound, err) + + npl, err := repos.Plugin.FindByID(ctx, pid) + assert.NoError(t, err) + assert.Equal(t, pid, npl.ID()) + + olps, err := repos.PropertySchema.FindByID(ctx, oldps.ID()) + assert.Nil(t, olps) // deleted + assert.Equal(t, rerror.ErrNotFound, err) + + nlps1, err := repos.PropertySchema.FindByID(ctx, nlpsid1) + assert.NoError(t, err) + assert.Equal(t, nlpsid1, nlps1.ID()) + + nlps2, err := repos.PropertySchema.FindByID(ctx, nlpsid2) + assert.NoError(t, err) + assert.Equal(t, nlpsid2, nlps2.ID()) + + _, err = mfs.Open("plugins/" + oldpid.String() + "/hogehoge") + assert.True(t, os.IsNotExist(err)) // deleted + + npf, err := 
mfs.Open("plugins/" + pid.String() + "/block.js") + assert.NoError(t, err) + npfc, _ := io.ReadAll(npf) + assert.Equal(t, "// barfoo", string(npfc)) + + // layer + nl, err := repos.Layer.FindByID(ctx, pluginLayer.ID()) + assert.NoError(t, err) + assert.Equal(t, pid, *nl.Plugin()) + assert.Equal(t, eid1, *nl.Extension()) + assert.Equal(t, oldp.ID(), *nl.Property()) + assert.Equal(t, oldp2.ID(), nl.Infobox().Property()) + assert.Equal(t, oldp3.ID(), nl.Infobox().FieldAt(0).Property()) + + nlp, err := repos.Property.FindByID(ctx, *nl.Property()) + assert.NoError(t, err) + assert.Equal(t, *nl.Property(), nlp.ID()) + assert.Equal(t, nlpsid1, nlp.Schema()) + assert.Equal(t, property.ValueTypeString.ValueFrom("100"), property.ToGroup(nlp.ItemBySchema("default")).Field("field").Value()) + + nlp2, err := repos.Property.FindByID(ctx, oldp2.ID()) + assert.NoError(t, err) + assert.Equal(t, nlpsid1, nlp2.Schema()) + + nlp3, err := repos.Property.FindByID(ctx, oldp3.ID()) + assert.NoError(t, err) + assert.Equal(t, nlpsid1, nlp3.Schema()) +} diff --git a/server/internal/usecase/interactor/project.go b/server/internal/usecase/interactor/project.go new file mode 100644 index 000000000..0ba5c8a7b --- /dev/null +++ b/server/internal/usecase/interactor/project.go @@ -0,0 +1,369 @@ +package interactor + +import ( + "context" + "errors" + "io" + "time" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/project" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/scene/builder" +) + +type Project struct { + common + commonSceneLock + assetRepo repo.Asset + projectRepo repo.Project + userRepo repo.User + teamRepo 
repo.Team + sceneRepo repo.Scene + propertyRepo repo.Property + layerRepo repo.Layer + datasetRepo repo.Dataset + datasetSchemaRepo repo.DatasetSchema + tagRepo repo.Tag + transaction repo.Transaction + file gateway.File +} + +func NewProject(r *repo.Container, gr *gateway.Container) interfaces.Project { + return &Project{ + commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, + assetRepo: r.Asset, + projectRepo: r.Project, + userRepo: r.User, + teamRepo: r.Team, + sceneRepo: r.Scene, + propertyRepo: r.Property, + layerRepo: r.Layer, + datasetRepo: r.Dataset, + datasetSchemaRepo: r.DatasetSchema, + tagRepo: r.Tag, + transaction: r.Transaction, + file: gr.File, + } +} + +func (i *Project) Fetch(ctx context.Context, ids []id.ProjectID, operator *usecase.Operator) ([]*project.Project, error) { + return i.projectRepo.FindByIDs(ctx, ids) +} + +func (i *Project) FindByTeam(ctx context.Context, id id.TeamID, p *usecase.Pagination, operator *usecase.Operator) ([]*project.Project, *usecase.PageInfo, error) { + return i.projectRepo.FindByTeam(ctx, id, p) +} + +func (i *Project) Create(ctx context.Context, p interfaces.CreateProjectParam, operator *usecase.Operator) (_ *project.Project, err error) { + if err := i.CanWriteTeam(p.TeamID, operator); err != nil { + return nil, err + } + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + pb := project.New(). + NewID(). + Team(p.TeamID). 
+ Visualizer(p.Visualizer) + if p.Name != nil { + pb = pb.Name(*p.Name) + } + if p.Description != nil { + pb = pb.Description(*p.Description) + } + if p.ImageURL != nil { + pb = pb.ImageURL(p.ImageURL) + } + if p.Alias != nil { + pb = pb.Alias(*p.Alias) + } + if p.Archived != nil { + pb = pb.IsArchived(*p.Archived) + } + + project, err := pb.Build() + if err != nil { + return nil, err + } + + err = i.projectRepo.Save(ctx, project) + if err != nil { + return nil, err + } + + tx.Commit() + return project, nil +} + +func (i *Project) Update(ctx context.Context, p interfaces.UpdateProjectParam, operator *usecase.Operator) (_ *project.Project, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + prj, err := i.projectRepo.FindByID(ctx, p.ID) + if err != nil { + return nil, err + } + if err := i.CanWriteTeam(prj.Team(), operator); err != nil { + return nil, err + } + + oldAlias := prj.Alias() + + if p.Name != nil { + prj.UpdateName(*p.Name) + } + + if p.Description != nil { + prj.UpdateDescription(*p.Description) + } + + if p.Alias != nil { + if err := prj.UpdateAlias(*p.Alias); err != nil { + return nil, err + } + } + + if p.DeleteImageURL { + prj.SetImageURL(nil) + } else if p.ImageURL != nil { + prj.SetImageURL(p.ImageURL) + } + + if p.Archived != nil { + prj.SetArchived(*p.Archived) + } + + if p.IsBasicAuthActive != nil { + prj.SetIsBasicAuthActive(*p.IsBasicAuthActive) + } + + if p.BasicAuthUsername != nil { + prj.SetBasicAuthUsername(*p.BasicAuthUsername) + } + + if p.BasicAuthPassword != nil { + prj.SetBasicAuthPassword(*p.BasicAuthPassword) + } + + if p.PublicTitle != nil { + prj.UpdatePublicTitle(*p.PublicTitle) + } + + if p.PublicDescription != nil { + prj.UpdatePublicDescription(*p.PublicDescription) + } + + if p.DeletePublicImage { + prj.UpdatePublicImage("") + } else if p.PublicImage != nil { + prj.UpdatePublicImage(*p.PublicImage) + } + 
+ if p.PublicNoIndex != nil { + prj.UpdatePublicNoIndex(*p.PublicNoIndex) + } + + if err := i.projectRepo.Save(ctx, prj); err != nil { + return nil, err + } + + if prj.PublishmentStatus() != project.PublishmentStatusPrivate && p.Alias != nil && *p.Alias != oldAlias { + if err := i.file.MoveBuiltScene(ctx, oldAlias, *p.Alias); err != nil { + // ignore ErrNotFound + if !errors.Is(err, rerror.ErrNotFound) { + return nil, err + } + } + } + + tx.Commit() + return prj, nil +} + +func (i *Project) CheckAlias(ctx context.Context, alias string) (bool, error) { + if !project.CheckAliasPattern(alias) { + return false, project.ErrInvalidAlias + } + + prj, err := i.projectRepo.FindByPublicName(ctx, alias) + if prj == nil && err == nil || err != nil && errors.Is(err, rerror.ErrNotFound) { + return true, nil + } + + return false, err +} + +func (i *Project) Publish(ctx context.Context, params interfaces.PublishProjectParam, operator *usecase.Operator) (_ *project.Project, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + prj, err := i.projectRepo.FindByID(ctx, params.ID) + if err != nil { + return nil, err + } + if err := i.CanWriteTeam(prj.Team(), operator); err != nil { + return nil, err + } + + s, err := i.sceneRepo.FindByProject(ctx, params.ID) + if err != nil { + return nil, err + } + + if err := i.CheckSceneLock(ctx, s.ID()); err != nil { + return nil, err + } + + sceneID := s.ID() + + prevAlias := prj.Alias() + if params.Alias == nil && prevAlias == "" && params.Status != project.PublishmentStatusPrivate { + return nil, interfaces.ErrProjectAliasIsNotSet + } + + var prevPublishedAlias string + if prj.PublishmentStatus() != project.PublishmentStatusPrivate { + prevPublishedAlias = prevAlias + } + + newAlias := prevAlias + if params.Alias != nil { + if prj2, err := i.projectRepo.FindByPublicName(ctx, *params.Alias); err != nil && 
!errors.Is(rerror.ErrNotFound, err) { + return nil, err + } else if prj2 != nil && prj.ID() != prj2.ID() { + return nil, interfaces.ErrProjectAliasAlreadyUsed + } + + if err := prj.UpdateAlias(*params.Alias); err != nil { + return nil, err + } + newAlias = *params.Alias + } + + newPublishedAlias := newAlias + + // Lock + if err := i.UpdateSceneLock(ctx, sceneID, scene.LockModeFree, scene.LockModePublishing); err != nil { + return nil, err + } + + defer i.ReleaseSceneLock(ctx, sceneID) + + if params.Status == project.PublishmentStatusPrivate { + // unpublish + if err = i.file.RemoveBuiltScene(ctx, prevPublishedAlias); err != nil { + return prj, err + } + } else { + // publish + r, w := io.Pipe() + + // Build + scenes := []id.SceneID{sceneID} + go func() { + var err error + + defer func() { + _ = w.CloseWithError(err) + }() + + err = builder.New( + repo.LayerLoaderFrom(i.layerRepo), + repo.PropertyLoaderFrom(i.propertyRepo), + repo.DatasetGraphLoaderFrom(i.datasetRepo), + repo.TagLoaderFrom(i.tagRepo), + repo.TagSceneLoaderFrom(i.tagRepo, scenes), + ).BuildScene(ctx, w, s, time.Now()) + }() + + // Save + if err := i.file.UploadBuiltScene(ctx, r, newPublishedAlias); err != nil { + return nil, err + } + + // If project has been published before and alias is changed, + // remove old published data. 
+ if prevPublishedAlias != "" && newPublishedAlias != prevPublishedAlias { + if err := i.file.RemoveBuiltScene(ctx, prevPublishedAlias); err != nil { + return nil, err + } + } + } + + prj.UpdatePublishmentStatus(params.Status) + prj.SetPublishedAt(time.Now()) + + if err := i.projectRepo.Save(ctx, prj); err != nil { + return nil, err + } + + tx.Commit() + return prj, nil +} + +func (i *Project) Delete(ctx context.Context, projectID id.ProjectID, operator *usecase.Operator) (err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + prj, err := i.projectRepo.FindByID(ctx, projectID) + if err != nil { + return err + } + if err := i.CanWriteTeam(prj.Team(), operator); err != nil { + return err + } + + deleter := ProjectDeleter{ + SceneDeleter: SceneDeleter{ + Scene: i.sceneRepo, + SceneLock: i.sceneLockRepo, + Layer: i.layerRepo, + Property: i.propertyRepo, + Dataset: i.datasetRepo, + DatasetSchema: i.datasetSchemaRepo, + }, + File: i.file, + Project: i.projectRepo, + } + if err := deleter.Delete(ctx, prj, true, operator); err != nil { + return err + } + + tx.Commit() + return nil +} diff --git a/server/internal/usecase/interactor/property.go b/server/internal/usecase/interactor/property.go new file mode 100644 index 000000000..54e29fc9e --- /dev/null +++ b/server/internal/usecase/interactor/property.go @@ -0,0 +1,502 @@ +package interactor + +import ( + "context" + "errors" + "path" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/asset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Property struct { + common + commonSceneLock + propertyRepo 
repo.Property + propertySchemaRepo repo.PropertySchema + datasetRepo repo.Dataset + datasetSchemaRepo repo.DatasetSchema + sceneRepo repo.Scene + assetRepo repo.Asset + transaction repo.Transaction + file gateway.File +} + +func NewProperty(r *repo.Container, gr *gateway.Container) interfaces.Property { + return &Property{ + commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, + propertyRepo: r.Property, + propertySchemaRepo: r.PropertySchema, + datasetRepo: r.Dataset, + datasetSchemaRepo: r.DatasetSchema, + sceneRepo: r.Scene, + assetRepo: r.Asset, + transaction: r.Transaction, + file: gr.File, + } +} + +func (i *Property) Fetch(ctx context.Context, ids []id.PropertyID, operator *usecase.Operator) ([]*property.Property, error) { + return i.propertyRepo.FindByIDs(ctx, ids) +} + +func (i *Property) FetchSchema(ctx context.Context, ids []id.PropertySchemaID, operator *usecase.Operator) ([]*property.Schema, error) { + return i.propertySchemaRepo.FindByIDs(ctx, ids) +} + +func (i *Property) FetchMerged(ctx context.Context, org, parent *id.PropertyID, linked *id.DatasetID, operator *usecase.Operator) (*property.Merged, error) { + ids := []id.PropertyID{} + if org != nil { + ids = append(ids, *org) + } + if parent != nil { + ids = append(ids, *parent) + } + props, err := i.propertyRepo.FindByIDs(ctx, ids) + if err != nil { + return nil, err + } + + var orgp, parentp *property.Property + if org != nil && parent != nil && len(props) == 2 { + orgp = props[0] + parentp = props[1] + } else if org != nil && parent == nil && len(props) == 1 { + orgp = props[0] + } else if org == nil && parent != nil && len(props) == 1 { + parentp = props[0] + } + + res := property.Merge(orgp, parentp, linked) + return res, nil +} + +func (i *Property) UpdateValue(ctx context.Context, inp interfaces.UpdatePropertyValueParam, operator *usecase.Operator) (p *property.Property, _ *property.GroupList, _ *property.Group, _ *property.Field, err error) { + + tx, err := i.transaction.Begin() + 
if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID) + if err != nil { + return nil, nil, nil, nil, err + } + if err := i.CanWriteScene(p.Scene(), operator); err != nil { + return nil, nil, nil, nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, nil, nil, nil, err + } + + ps, err := i.propertySchemaRepo.FindByID(ctx, p.Schema()) + if err != nil { + return nil, nil, nil, nil, err + } + + field, pgl, pg, err := p.UpdateValue(ps, inp.Pointer, inp.Value) + if err != nil { + return nil, nil, nil, nil, err + } + + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, nil, nil, nil, err + } + + tx.Commit() + return p, pgl, pg, field, nil +} + +func (i *Property) RemoveField(ctx context.Context, inp interfaces.RemovePropertyFieldParam, operator *usecase.Operator) (p *property.Property, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID) + if err != nil { + return nil, err + } + if err := i.CanWriteScene(p.Scene(), operator); err != nil { + return nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, err + } + + p.RemoveField(inp.Pointer) + p.Prune() + + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, err + } + + tx.Commit() + return p, nil +} + +func (i *Property) UploadFile(ctx context.Context, inp interfaces.UploadFileParam, operator *usecase.Operator) (p *property.Property, pgl *property.GroupList, pg *property.Group, field *property.Field, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + if inp.File == nil { + return nil, nil, nil, nil, 
interfaces.ErrInvalidFile + } + + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID) + if err != nil { + return nil, nil, nil, nil, err + } + if err := i.CanWriteScene(p.Scene(), operator); err != nil { + return nil, nil, nil, nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, nil, nil, nil, err + } + + propertyScene, err := i.sceneRepo.FindByID(ctx, p.Scene()) + if err != nil { + return nil, nil, nil, nil, err + } + ps, err := i.propertySchemaRepo.FindByID(ctx, p.Schema()) + if err != nil { + return nil, nil, nil, nil, err + } + + field, pgl, pg, _ = p.GetOrCreateField(ps, inp.Pointer) + + if field.Type() != property.ValueTypeURL { + return nil, nil, nil, nil, interfaces.ErrPropertyInvalidType + } + + url, err := i.file.UploadAsset(ctx, inp.File) + if err != nil { + return nil, nil, nil, nil, err + } + + asset, err := asset.New(). + NewID(). + Team(propertyScene.Team()). + Name(path.Base(inp.File.Path)). + Size(inp.File.Size). + URL(url.String()). 
+ Build() + if err != nil { + return nil, nil, nil, nil, err + } + + err = i.assetRepo.Save(ctx, asset) + if err != nil { + return nil, nil, nil, nil, err + } + + v := property.ValueTypeURL.ValueFrom(url) + if v == nil { + return nil, nil, nil, nil, interfaces.ErrInvalidPropertyValue + } + if err = field.Update(v, ps.Groups().Field(field.Field())); err != nil { + return nil, nil, nil, nil, err + } + + if err = i.propertyRepo.Save(ctx, p); err != nil { + return nil, nil, nil, nil, err + } + + tx.Commit() + return p, pgl, pg, field, nil +} + +func (i *Property) LinkValue(ctx context.Context, inp interfaces.LinkPropertyValueParam, operator *usecase.Operator) (p *property.Property, pgl *property.GroupList, pg *property.Group, field *property.Field, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID) + if err != nil { + return nil, nil, nil, nil, err + } + if err := i.CanWriteScene(p.Scene(), operator); err != nil { + return nil, nil, nil, nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, nil, nil, nil, err + } + + ps, err := i.propertySchemaRepo.FindByID(ctx, p.Schema()) + if err != nil { + return nil, nil, nil, nil, err + } + + field, pgl, pg, _ = p.GetOrCreateField(ps, inp.Pointer) + + if inp.Links != nil { + dsids := inp.Links.DatasetSchemaIDs() + dids := inp.Links.DatasetIDs() + dss, err := i.datasetSchemaRepo.FindByIDs(ctx, dsids) + if err != nil { + return nil, nil, nil, nil, err + } + ds, err := i.datasetRepo.FindByIDs(ctx, dids) + if err != nil { + return nil, nil, nil, nil, err + } + if !inp.Links.Validate(dss.Map(), ds.Map()) { + return nil, nil, nil, nil, interfaces.ErrInvalidPropertyLinks + } + } + + field.Link(inp.Links) + + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, nil, nil, nil, err + } + + tx.Commit() + return p, 
pgl, pg, field, nil +} + +func (i *Property) UnlinkValue(ctx context.Context, inp interfaces.UnlinkPropertyValueParam, operator *usecase.Operator) (p *property.Property, pgl *property.GroupList, pg *property.Group, field *property.Field, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID) + if err != nil { + return nil, nil, nil, nil, err + } + if err := i.CanWriteScene(p.Scene(), operator); err != nil { + return nil, nil, nil, nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, nil, nil, nil, err + } + + ps, err := i.propertySchemaRepo.FindByID(ctx, p.Schema()) + if err != nil { + return nil, nil, nil, nil, err + } + + field, pgl, pg, _ = p.GetOrCreateField(ps, inp.Pointer) + + field.Unlink() + + if field.IsEmpty() { + field = nil + } + p.Prune() + + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, nil, nil, nil, err + } + + tx.Commit() + return p, pgl, pg, field, nil +} + +func (i *Property) AddItem(ctx context.Context, inp interfaces.AddPropertyItemParam, operator *usecase.Operator) (p *property.Property, _ *property.GroupList, pg *property.Group, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID) + if err != nil { + return nil, nil, nil, err + } + if err := i.CanWriteScene(p.Scene(), operator); err != nil { + return nil, nil, nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, nil, nil, err + } + + ps, err := i.propertySchemaRepo.FindByID(ctx, p.Schema()) + if err != nil { + return nil, nil, nil, err + } + + item, gl := p.AddListItem(ps, inp.Pointer, inp.Index) + if item == nil { + return nil, nil, nil, 
errors.New("failed to create item") + } + + // Set nameFieldValue to the name field + if inp.NameFieldValue != nil { + item.RepresentativeField(ps).UpdateUnsafe(inp.NameFieldValue) + } + + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, nil, nil, err + } + + tx.Commit() + return p, gl, item, nil +} + +func (i *Property) MoveItem(ctx context.Context, inp interfaces.MovePropertyItemParam, operator *usecase.Operator) (p *property.Property, _ *property.GroupList, _ *property.Group, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID) + if err != nil { + return nil, nil, nil, err + } + if err := i.CanWriteScene(p.Scene(), operator); err != nil { + return nil, nil, nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, nil, nil, err + } + + item, gl := p.MoveListItem(inp.Pointer, inp.Index) + if item == nil { + return nil, nil, nil, errors.New("failed to move item") + } + + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, nil, nil, err + } + + tx.Commit() + return p, gl, item, nil +} + +func (i *Property) RemoveItem(ctx context.Context, inp interfaces.RemovePropertyItemParam, operator *usecase.Operator) (p *property.Property, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID) + if err != nil { + return nil, err + } + if err := i.CanWriteScene(p.Scene(), operator); err != nil { + return nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, err + } + + if ok := p.RemoveListItem(inp.Pointer); !ok { + return nil, errors.New("failed to remove item") + } + + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, 
err + } + + tx.Commit() + return p, nil +} + +func (i *Property) UpdateItems(ctx context.Context, inp interfaces.UpdatePropertyItemsParam, operator *usecase.Operator) (*property.Property, error) { + p, err := i.propertyRepo.FindByID(ctx, inp.PropertyID) + if err != nil { + return nil, err + } + if err := i.CanWriteScene(p.Scene(), operator); err != nil { + return nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, err + } + + ps, err := i.propertySchemaRepo.FindByID(ctx, p.Schema()) + if err != nil { + return nil, err + } + + for _, op := range inp.Operations { + var ptr *property.Pointer + if op.ItemID != nil { + ptr = property.PointItem(*op.ItemID) + } + + if op.Operation == interfaces.ListOperationAdd { + g, _ := p.AddListItem(ps, inp.Pointer, op.Index) + if op.NameFieldValue != nil { + g.RepresentativeField(ps).UpdateUnsafe(op.NameFieldValue) + } + } else if op.Operation == interfaces.ListOperationMove && ptr != nil && op.Index != nil { + _, _ = p.MoveListItem(ptr, *op.Index) + } else if op.Operation == interfaces.ListOperationRemove && ptr != nil { + _ = p.RemoveListItem(ptr) + } + } + + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, err + } + + return p, nil +} diff --git a/server/internal/usecase/interactor/property_test.go b/server/internal/usecase/interactor/property_test.go new file mode 100644 index 000000000..4377b324a --- /dev/null +++ b/server/internal/usecase/interactor/property_test.go @@ -0,0 +1,164 @@ +package interactor + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/internal/infrastructure/memory" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/stretchr/testify/assert" +) + +func TestProperty_AddItem(t *testing.T) { + ctx := 
context.Background() + memory := memory.New() + + team := id.NewTeamID() + scene := scene.New().NewID().Team(team).RootLayer(id.NewLayerID()).MustBuild() + psg := property.NewSchemaGroup().ID("foobar").IsList(true).Fields([]*property.SchemaField{ + property.NewSchemaField().ID("field").Type(property.ValueTypeString).MustBuild(), + }).MustBuild() + ps := property.NewSchema().ID(property.MustSchemaID("xxx~1.1.1/aa")). + Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ + psg, + })). + MustBuild() + p := property.New().NewID().Scene(scene.ID()).Schema(ps.ID()).MustBuild() + _ = memory.Scene.Save(ctx, scene) + _ = memory.PropertySchema.Save(ctx, ps) + _ = memory.Property.Save(ctx, p) + + uc := &Property{ + commonSceneLock: commonSceneLock{sceneLockRepo: memory.SceneLock}, + propertyRepo: memory.Property, + propertySchemaRepo: memory.PropertySchema, + transaction: memory.Transaction, + } + op := &usecase.Operator{ + ReadableScenes: []id.SceneID{scene.ID()}, + WritableScenes: []id.SceneID{scene.ID()}, + } + + index := -1 + np, npl, npg, err := uc.AddItem(ctx, interfaces.AddPropertyItemParam{ + PropertyID: p.ID(), + Index: &index, + Pointer: property.PointItemBySchema(psg.ID()), + }, op) + assert.NoError(t, err) + assert.NotNil(t, np) + assert.NotNil(t, npl) + assert.NotNil(t, npg) + assert.Equal(t, p.ID(), np.ID()) + assert.Equal(t, psg.ID(), npl.SchemaGroup()) + assert.Equal(t, psg.ID(), npg.SchemaGroup()) + + assert.Same(t, npl, property.ToGroupList(np.ItemBySchema(psg.ID()))) + assert.Equal(t, npg, npl.GroupAt(0)) + assert.Equal(t, 1, len(npl.Groups())) + + np2, _ := memory.Property.FindByID(ctx, p.ID()) + assert.Equal(t, np, np2) +} + +func TestProperty_RemoveItem(t *testing.T) { + ctx := context.Background() + memory := memory.New() + + team := id.NewTeamID() + scene := scene.New().NewID().Team(team).RootLayer(id.NewLayerID()).MustBuild() + psg := property.NewSchemaGroup().ID("foobar").IsList(true).MustBuild() + ps := 
property.NewSchema().ID(property.MustSchemaID("xxx~1.1.1/aa")). + Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ + psg, + })). + MustBuild() + pg := property.NewGroup().NewID().SchemaGroup(psg.ID()).MustBuild() + pl := property.NewGroupList().NewID().SchemaGroup(psg.ID()).Groups([]*property.Group{pg}).MustBuild() + p := property.New().NewID().Scene(scene.ID()).Schema(ps.ID()).Items([]property.Item{pl}).MustBuild() + _ = memory.Scene.Save(ctx, scene) + _ = memory.PropertySchema.Save(ctx, ps) + _ = memory.Property.Save(ctx, p) + + uc := &Property{ + commonSceneLock: commonSceneLock{sceneLockRepo: memory.SceneLock}, + propertyRepo: memory.Property, + propertySchemaRepo: memory.PropertySchema, + transaction: memory.Transaction, + } + op := &usecase.Operator{ + ReadableScenes: []id.SceneID{scene.ID()}, + WritableScenes: []id.SceneID{scene.ID()}, + } + + np, err := uc.RemoveItem(ctx, interfaces.RemovePropertyItemParam{ + PropertyID: p.ID(), + Pointer: property.NewPointer(psg.IDRef(), pg.IDRef(), nil), + }, op) + assert.NoError(t, err) + assert.NotNil(t, np) + assert.Equal(t, p.ID(), np.ID()) + + npl := property.ToGroupList(np.ItemBySchema(psg.ID())) + assert.Equal(t, 0, len(npl.Groups())) + + np2, _ := memory.Property.FindByID(ctx, p.ID()) + assert.Equal(t, np, np2) +} + +func TestProperty_UpdateValue_FieldOfGroupInList(t *testing.T) { + ctx := context.Background() + memory := memory.New() + + team := id.NewTeamID() + scene := scene.New().NewID().Team(team).RootLayer(id.NewLayerID()).MustBuild() + psf := property.NewSchemaField().ID("field").Type(property.ValueTypeString).MustBuild() + psg := property.NewSchemaGroup().ID("foobar").IsList(true).Fields([]*property.SchemaField{psf}).MustBuild() + ps := property.NewSchema().ID(property.MustSchemaID("xxx~1.1.1/aa")). + Groups(property.NewSchemaGroupList([]*property.SchemaGroup{psg})). 
+ MustBuild() + pg := property.NewGroup().NewID().SchemaGroup(psg.ID()).MustBuild() + pl := property.NewGroupList().NewID().SchemaGroup(psg.ID()).Groups([]*property.Group{pg}).MustBuild() + p := property.New().NewID().Scene(scene.ID()).Schema(ps.ID()).Items([]property.Item{pl}).MustBuild() + _ = memory.Scene.Save(ctx, scene) + _ = memory.PropertySchema.Save(ctx, ps) + _ = memory.Property.Save(ctx, p) + + uc := &Property{ + commonSceneLock: commonSceneLock{sceneLockRepo: memory.SceneLock}, + sceneRepo: memory.Scene, + propertyRepo: memory.Property, + propertySchemaRepo: memory.PropertySchema, + transaction: memory.Transaction, + } + op := &usecase.Operator{ + WritableTeams: []id.TeamID{team}, + WritableScenes: []id.SceneID{scene.ID()}, + } + + np, npl, npg, npf, err := uc.UpdateValue(ctx, interfaces.UpdatePropertyValueParam{ + PropertyID: p.ID(), + Pointer: property.PointField(psg.IDRef(), pg.IDRef(), psf.ID()), + Value: property.ValueTypeString.ValueFrom("aaaa"), + }, op) + + assert.NoError(t, err) + assert.NotNil(t, np) + assert.NotNil(t, npl) + assert.NotNil(t, npg) + assert.NotNil(t, npf) + assert.Equal(t, p.ID(), np.ID()) + assert.Equal(t, pl.ID(), npl.ID()) + assert.Equal(t, []*property.Group{pg}, npl.Groups()) + assert.Equal(t, pg.ID(), npg.ID()) + assert.Same(t, npf, npg.Field(psf.ID())) + assert.Equal(t, psf.ID(), npf.Field()) + assert.Equal(t, property.ValueTypeString.ValueFrom("aaaa"), npf.Value()) + + np2, _ := memory.Property.FindByID(ctx, p.ID()) + assert.Equal(t, np, np2) +} diff --git a/server/internal/usecase/interactor/published.go b/server/internal/usecase/interactor/published.go new file mode 100644 index 000000000..1ed460ca0 --- /dev/null +++ b/server/internal/usecase/interactor/published.go @@ -0,0 +1,139 @@ +package interactor + +import ( + "bytes" + "context" + "errors" + "html" + "html/template" + "io" + "net/http" + "net/url" + "regexp" + "strings" + "time" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + 
"github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/cache" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type Published struct { + project repo.Project + file gateway.File + indexHTML *cache.Cache[string] + indexHTMLStr string +} + +func NewPublished(project repo.Project, file gateway.File, indexHTML string) interfaces.Published { + return &Published{ + project: project, + file: file, + indexHTMLStr: indexHTML, + } +} + +func NewPublishedWithURL(project repo.Project, file gateway.File, indexHTMLURL *url.URL) interfaces.Published { + return &Published{ + project: project, + file: file, + indexHTML: cache.New(func(c context.Context, i string) (string, error) { + req, err := http.NewRequestWithContext(c, http.MethodGet, indexHTMLURL.String(), nil) + if err != nil { + return "", err + } + res, err := http.DefaultClient.Do(req) + if err != nil { + log.Errorf("published index: conn err: %s", err) + return "", errors.New("failed to fetch HTML") + } + defer func() { + _ = res.Body.Close() + }() + if res.StatusCode >= 300 { + log.Errorf("published index: status err: %d", res.StatusCode) + return "", errors.New("failed to fetch HTML") + } + str, err := io.ReadAll(res.Body) + if err != nil { + log.Errorf("published index: read err: %s", err) + return "", errors.New("failed to fetch HTML") + } + return string(str), nil + }, time.Hour), + } +} + +func (i *Published) Metadata(ctx context.Context, name string) (interfaces.ProjectPublishedMetadata, error) { + prj, err := i.project.FindByPublicName(ctx, name) + if err != nil || prj == nil { + return interfaces.ProjectPublishedMetadata{}, rerror.ErrNotFound + } + + return interfaces.ProjectPublishedMetadataFrom(prj), nil +} + +func (i *Published) Data(ctx context.Context, name string) (io.Reader, error) { + r, err := i.file.ReadBuiltSceneFile(ctx, name) + if err 
!= nil { + return nil, err + } + + return r, nil +} + +func (i *Published) Index(ctx context.Context, name string, u *url.URL) (string, error) { + prj, err := i.project.FindByPublicName(ctx, name) + if err != nil || prj == nil { + return "", err + } + + md := interfaces.ProjectPublishedMetadataFrom(prj) + + html := i.indexHTMLStr + if i.indexHTML != nil { + htmli, err := i.indexHTML.Get(ctx) + if err != nil { + return "", err + } + html = htmli + } + return renderIndex(html, u.String(), md), nil +} + +const headers = `{{if .title}} + {{end}}{{if .description}} + + {{end}}{{if .image}} + + + {{end}} + + {{if .noindex}} + {{end}} +` + +var ( + headersTemplate = template.Must(template.New("headers").Parse(headers)) + titleRegexp = regexp.MustCompile(".+?") +) + +// renderIndex returns index HTML with OGP and some meta tags for the project. +func renderIndex(index, url string, d interfaces.ProjectPublishedMetadata) string { + if d.Title != "" { + index = titleRegexp.ReplaceAllLiteralString(index, ""+html.EscapeString(d.Title)+"") + } + var b bytes.Buffer + _ = headersTemplate.Execute(&b, + map[string]interface{}{ + "title": d.Title, + "description": d.Description, + "image": d.Image, + "noindex": d.Noindex, + "url": url, + }) + return strings.Replace(index, "", b.String()+"", -1) +} diff --git a/server/internal/usecase/interactor/published_test.go b/server/internal/usecase/interactor/published_test.go new file mode 100644 index 000000000..b6c31aae9 --- /dev/null +++ b/server/internal/usecase/interactor/published_test.go @@ -0,0 +1,35 @@ +package interactor + +import ( + "testing" + + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/stretchr/testify/assert" +) + +func TestRenderIndex(t *testing.T) { + assert.Equal(t, ` + xxx> + + + + + + + + + + +`, renderIndex( + ` + Foobar +`, + "https://xxss.com", + interfaces.ProjectPublishedMetadata{ + Title: "xxx>", + Description: "desc", + Image: "hogehoge", + Noindex: true, + }, + )) +} diff --git 
a/server/internal/usecase/interactor/scene.go b/server/internal/usecase/interactor/scene.go new file mode 100644 index 000000000..be34ce58e --- /dev/null +++ b/server/internal/usecase/interactor/scene.go @@ -0,0 +1,511 @@ +package interactor + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +type Scene struct { + common + sceneRepo repo.Scene + propertyRepo repo.Property + propertySchemaRepo repo.PropertySchema + projectRepo repo.Project + pluginRepo repo.Plugin + layerRepo repo.Layer + datasetRepo repo.Dataset + transaction repo.Transaction + file gateway.File + pluginRegistry gateway.PluginRegistry +} + +func NewScene(r *repo.Container, g *gateway.Container) interfaces.Scene { + return &Scene{ + sceneRepo: r.Scene, + propertyRepo: r.Property, + propertySchemaRepo: r.PropertySchema, + projectRepo: r.Project, + pluginRepo: r.Plugin, + layerRepo: r.Layer, + datasetRepo: r.Dataset, + transaction: r.Transaction, + file: g.File, + pluginRegistry: g.PluginRegistry, + } +} + +func (i *Scene) pluginCommon() *pluginCommon { + return &pluginCommon{ + pluginRepo: i.pluginRepo, + propertySchemaRepo: i.propertySchemaRepo, + file: i.file, + pluginRegistry: i.pluginRegistry, + } +} + +func (i *Scene) Fetch(ctx context.Context, ids []id.SceneID, operator *usecase.Operator) ([]*scene.Scene, error) { + return i.sceneRepo.FindByIDs(ctx, ids) +} + +func 
(i *Scene) FindByProject(ctx context.Context, id id.ProjectID, operator *usecase.Operator) (*scene.Scene, error) { + return i.sceneRepo.FindByProject(ctx, id) +} + +func (i *Scene) Create(ctx context.Context, pid id.ProjectID, operator *usecase.Operator) (_ *scene.Scene, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + prj, err := i.projectRepo.FindByID(ctx, pid) + if err != nil { + return nil, err + } + team := prj.Team() + if err := i.CanWriteTeam(team, operator); err != nil { + return nil, err + } + + schema := builtin.GetPropertySchemaByVisualizer(visualizer.VisualizerCesium) + sceneID := id.NewSceneID() + + rootLayer, err := layer.NewGroup().NewID().Scene(sceneID).Root(true).Build() + if err != nil { + return nil, err + } + + ps := scene.NewPlugins([]*scene.Plugin{ + scene.NewPlugin(id.OfficialPluginID, nil), + }) + + p, err := property.New().NewID().Schema(schema.ID()).Scene(sceneID).Build() + if err != nil { + return nil, err + } + + // add default tile + tiles := id.PropertySchemaGroupID("tiles") + g := p.GetOrCreateGroupList(schema, property.PointItemBySchema(tiles)) + g.Add(property.NewGroup().NewID().SchemaGroup(tiles).MustBuild(), -1) + + res, err := scene.New(). + ID(sceneID). + Project(pid). + Team(prj.Team()). + Property(p.ID()). + RootLayer(rootLayer.ID()). + Plugins(ps). 
+ Build() + + if err != nil { + return nil, err + } + + if p != nil { + err = i.propertyRepo.Filtered(repo.SceneFilter{Writable: scene.IDList{sceneID}}).Save(ctx, p) + if err != nil { + return nil, err + } + } + + err = i.layerRepo.Filtered(repo.SceneFilter{Writable: scene.IDList{sceneID}}).Save(ctx, rootLayer) + if err != nil { + return nil, err + } + + err = i.sceneRepo.Save(ctx, res) + if err != nil { + return nil, err + } + + operator.AddNewScene(team, sceneID) + tx.Commit() + return res, err +} + +func (i *Scene) AddWidget(ctx context.Context, sid id.SceneID, pid id.PluginID, eid id.PluginExtensionID, operator *usecase.Operator) (_ *scene.Scene, widget *scene.Widget, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + s, err := i.sceneRepo.FindByID(ctx, sid) + if err != nil { + return nil, nil, err + } + if err := i.CanWriteTeam(s.Team(), operator); err != nil { + return nil, nil, err + } + + pr, err := i.pluginRepo.FindByID(ctx, pid) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return nil, nil, interfaces.ErrPluginNotFound + } + return nil, nil, err + } + + extension := pr.Extension(eid) + if extension == nil { + return nil, nil, interfaces.ErrExtensionNotFound + } + + if extension.Type() != plugin.ExtensionTypeWidget { + return nil, nil, interfaces.ErrExtensionTypeMustBeWidget + } + + property, err := property.New().NewID().Schema(extension.Schema()).Scene(sid).Build() + if err != nil { + return nil, nil, err + } + + extended := false + floating := false + var location *plugin.WidgetLocation + if widgetLayout := extension.WidgetLayout(); widgetLayout != nil { + extended = widgetLayout.Extended() + floating = widgetLayout.Floating() + location = widgetLayout.DefaultLocation() + } + + widget, err = scene.NewWidget( + id.NewWidgetID(), + pid, + eid, + property.ID(), + true, + extended, + ) + if err != nil { + return nil, 
nil, err + } + + s.Widgets().Add(widget) + + if !floating { + var loc scene.WidgetLocation + if location != nil { + loc = scene.WidgetLocation{ + Zone: scene.WidgetZoneType(location.Zone), + Section: scene.WidgetSectionType(location.Section), + Area: scene.WidgetAreaType(location.Area), + } + } else { + loc = scene.WidgetLocation{ + Zone: scene.WidgetZoneOuter, + Section: scene.WidgetSectionLeft, + Area: scene.WidgetAreaTop, + } + } + s.Widgets().Alignment().Area(loc).Add(widget.ID(), -1) + } + + err = i.propertyRepo.Save(ctx, property) + if err != nil { + return nil, nil, err + } + + err = i.sceneRepo.Save(ctx, s) + if err != nil { + return nil, nil, err + } + + tx.Commit() + return s, widget, nil +} + +func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetParam, operator *usecase.Operator) (_ *scene.Scene, _ *scene.Widget, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + scene, err2 := i.sceneRepo.FindByID(ctx, param.SceneID) + if err2 != nil { + return nil, nil, err2 + } + if err := i.CanWriteTeam(scene.Team(), operator); err != nil { + return nil, nil, err + } + + widget := scene.Widgets().Widget(param.WidgetID) + if widget == nil { + return nil, nil, rerror.ErrNotFound + } + _, location := scene.Widgets().Alignment().Find(param.WidgetID) + + pr, err := i.pluginRepo.FindByID(ctx, widget.Plugin()) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return nil, nil, interfaces.ErrPluginNotFound + } + return nil, nil, err + } + + extension := pr.Extension(widget.Extension()) + if extension == nil { + return nil, nil, interfaces.ErrExtensionNotFound + } + + if extension.Type() != plugin.ExtensionTypeWidget { + return nil, nil, interfaces.ErrExtensionTypeMustBeWidget + } + + if param.Enabled != nil { + widget.SetEnabled(*param.Enabled) + } + + if param.Location != nil || param.Index != nil { + if 
param.Location != nil { + location = *param.Location + } + index := -1 + if param.Index != nil { + index = *param.Index + } + scene.Widgets().Alignment().Move(widget.ID(), location, index) + } + + if param.Extended != nil { + widget.SetExtended(*param.Extended) + } + + // check extendable + if layout := extension.WidgetLayout(); layout != nil { + extendable := layout.Extendable(plugin.WidgetLocation{ + Zone: plugin.WidgetZoneType(location.Zone), + Section: plugin.WidgetSectionType(location.Section), + Area: plugin.WidgetAreaType(location.Area), + }) + if e := widget.Extended(); !extendable && e { + widget.SetExtended(false) + } + } + + err2 = i.sceneRepo.Save(ctx, scene) + if err2 != nil { + return nil, nil, err2 + } + + tx.Commit() + return scene, widget, nil +} + +func (i *Scene) UpdateWidgetAlignSystem(ctx context.Context, param interfaces.UpdateWidgetAlignSystemParam, operator *usecase.Operator) (_ *scene.Scene, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + scene, err2 := i.sceneRepo.FindByID(ctx, param.SceneID) + if err2 != nil { + return nil, err2 + } + if err := i.CanWriteTeam(scene.Team(), operator); err != nil { + return nil, err + } + + area := scene.Widgets().Alignment().Area(param.Location) + + if area == nil { + return nil, errors.New("invalid location") + } + + if param.Align != nil { + area.SetAlignment(*param.Align) + } + + if err = i.sceneRepo.Save(ctx, scene); err != nil { + return nil, err + } + + tx.Commit() + return scene, nil +} + +func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, wid id.WidgetID, operator *usecase.Operator) (_ *scene.Scene, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + scene, err2 := i.sceneRepo.FindByID(ctx, id) + if err2 != nil { + return nil, err2 + } 
+ if err := i.CanWriteTeam(scene.Team(), operator); err != nil { + return nil, err + } + + ws := scene.Widgets() + + widget := ws.Widget(wid) + if widget == nil { + return nil, rerror.ErrNotFound + } + + ws.Remove(wid) + scene.Widgets().Alignment().Remove(wid) + + err2 = i.propertyRepo.Remove(ctx, widget.Property()) + if err2 != nil { + return nil, err2 + } + + err2 = i.sceneRepo.Save(ctx, scene) + if err2 != nil { + return nil, err2 + } + + tx.Commit() + return scene, nil +} + +func (i *Scene) AddCluster(ctx context.Context, sceneID id.SceneID, name string, operator *usecase.Operator) (*scene.Scene, *scene.Cluster, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + s, err := i.sceneRepo.FindByID(ctx, sceneID) + if err != nil { + return nil, nil, err + } + if err := i.CanWriteTeam(s.Team(), operator); err != nil { + return nil, nil, err + } + + prop, err := property.New().NewID().Schema(id.MustPropertySchemaID("reearth/cluster")).Scene(sceneID).Build() + if err != nil { + return nil, nil, err + } + + cid := id.NewClusterID() + cluster, err := scene.NewCluster(cid, name, prop.ID()) + if err != nil { + return nil, nil, err + } + s.Clusters().Add(cluster) + + err = i.propertyRepo.Save(ctx, prop) + if err != nil { + return nil, nil, err + } + + if err := i.sceneRepo.Save(ctx, s); err != nil { + return nil, nil, err + } + + tx.Commit() + return s, cluster, nil +} + +func (i *Scene) UpdateCluster(ctx context.Context, param interfaces.UpdateClusterParam, operator *usecase.Operator) (*scene.Scene, *scene.Cluster, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + s, err := i.sceneRepo.FindByID(ctx, param.SceneID) + if err != nil { + return nil, nil, err + } + if err := i.CanWriteTeam(s.Team(), operator); 
err != nil { + return nil, nil, err + } + + cluster := s.Clusters().Get(param.ClusterID) + if cluster == nil { + return nil, nil, rerror.ErrNotFound + } + if param.Name != nil { + cluster.Rename(*param.Name) + } + if param.PropertyID != nil { + cluster.UpdateProperty(*param.PropertyID) + } + + if err := i.sceneRepo.Save(ctx, s); err != nil { + return nil, nil, err + } + + tx.Commit() + return s, cluster, nil +} + +func (i *Scene) RemoveCluster(ctx context.Context, sceneID id.SceneID, clusterID id.ClusterID, operator *usecase.Operator) (*scene.Scene, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + s, err := i.sceneRepo.FindByID(ctx, sceneID) + if err != nil { + return nil, err + } + if err := i.CanWriteTeam(s.Team(), operator); err != nil { + return nil, err + } + + s.Clusters().Remove(clusterID) + + if err := i.sceneRepo.Save(ctx, s); err != nil { + return nil, err + } + + tx.Commit() + return s, nil +} diff --git a/server/internal/usecase/interactor/scene_plugin.go b/server/internal/usecase/interactor/scene_plugin.go new file mode 100644 index 000000000..cd0b56036 --- /dev/null +++ b/server/internal/usecase/interactor/scene_plugin.go @@ -0,0 +1,234 @@ +package interactor + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer/layerops" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/scene/sceneops" +) + +func (i *Scene) InstallPlugin(ctx context.Context, sid id.SceneID, pid id.PluginID, operator *usecase.Operator) (_ *scene.Scene, _ 
*id.PropertyID, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + s, err := i.sceneRepo.FindByID(ctx, sid) + if err != nil { + return nil, nil, err + } + if err := i.CanWriteTeam(s.Team(), operator); err != nil { + return nil, nil, err + } + + if s.Plugins().HasPlugin(pid) { + return nil, nil, interfaces.ErrPluginAlreadyInstalled + } + + plugin, err := i.pluginCommon().GetOrDownloadPlugin(ctx, pid) + if err != nil { + if errors.Is(rerror.ErrNotFound, err) { + return nil, nil, interfaces.ErrPluginNotFound + } + return nil, nil, err + } + if plugin == nil { + return nil, nil, interfaces.ErrPluginNotFound + } + if psid := plugin.ID().Scene(); psid != nil && *psid != sid { + return nil, nil, interfaces.ErrPluginNotFound + } + + var p *property.Property + if schema := plugin.Schema(); schema != nil { + p, err = property.New().NewID().Schema(*schema).Scene(sid).Build() + if err != nil { + return nil, nil, err + } + } + + s.Plugins().Add(scene.NewPlugin(pid, p.IDRef())) + + if p != nil { + if err := i.propertyRepo.Save(ctx, p); err != nil { + return nil, nil, err + } + } + + if err := i.sceneRepo.Save(ctx, s); err != nil { + return nil, nil, err + } + + tx.Commit() + return s, p.IDRef(), nil +} + +func (i *Scene) UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.PluginID, operator *usecase.Operator) (_ *scene.Scene, err error) { + if pid.System() { + return nil, rerror.ErrNotFound + } + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + scene, err := i.sceneRepo.FindByID(ctx, sid) + if err != nil { + return nil, err + } + if err := i.CanWriteTeam(scene.Team(), operator); err != nil { + return nil, err + } + + pl, err := i.pluginRepo.FindByID(ctx, pid) + if err != nil { + if errors.Is(rerror.ErrNotFound, err) { + return nil, 
interfaces.ErrPluginNotFound + } + return nil, err + } + + ps := scene.Plugins() + if !ps.Has(pid) { + return nil, interfaces.ErrPluginNotInstalled + } + + removedProperties := []id.PropertyID{} + + // remove plugin + if p := ps.Property(pid); p != nil { + removedProperties = append(removedProperties, *p) + } + ps.Remove(pid) + + // remove widgets + removedProperties = append(removedProperties, scene.Widgets().RemoveAllByPlugin(pid, nil)...) + + // remove blocks + res, err := layerops.Processor{ + LayerLoader: repo.LayerLoaderFrom(i.layerRepo), + RootLayerID: scene.RootLayer(), + }.UninstallPlugin(ctx, pid) + if err != nil { + return nil, err + } + + removedProperties = append(removedProperties, res.RemovedProperties...) + + // save + if len(res.ModifiedLayers) > 0 { + if err := i.layerRepo.SaveAll(ctx, res.ModifiedLayers); err != nil { + return nil, err + } + } + + if len(removedProperties) > 0 { + if err := i.propertyRepo.RemoveAll(ctx, removedProperties); err != nil { + return nil, err + } + } + + if err := i.sceneRepo.Save(ctx, scene); err != nil { + return nil, err + } + + // if the plugin is private, uninstall it + if psid := pid.Scene(); psid != nil && *psid == sid { + if err := i.pluginRepo.Remove(ctx, pl.ID()); err != nil { + return nil, err + } + if ps := pl.PropertySchemas(); len(ps) > 0 { + if err := i.propertySchemaRepo.RemoveAll(ctx, ps); err != nil { + return nil, err + } + } + if err := i.file.RemovePlugin(ctx, pl.ID()); err != nil { + return nil, err + } + } + + tx.Commit() + return scene, nil +} + +func (i *Scene) UpgradePlugin(ctx context.Context, sid id.SceneID, oldPluginID, newPluginID id.PluginID, operator *usecase.Operator) (_ *scene.Scene, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + s, err := i.sceneRepo.FindByID(ctx, sid) + if err != nil { + return nil, err + } + if err := i.CanWriteTeam(s.Team(), operator); 
err != nil { + return nil, err + } + + if oldPluginID.IsNil() || newPluginID.IsNil() || oldPluginID.Equal(newPluginID) || !oldPluginID.NameEqual(newPluginID) { + return nil, interfaces.ErrCannotUpgradeToPlugin + } + + if !s.Plugins().Has(oldPluginID) { + return nil, interfaces.ErrPluginNotInstalled + } + + if plugin, err := i.pluginCommon().GetOrDownloadPlugin(ctx, newPluginID); err != nil { + return nil, err + } else if plugin == nil { + return nil, interfaces.ErrPluginNotFound + } + + pluginMigrator := sceneops.PluginMigrator{ + Property: repo.PropertyLoaderFrom(i.propertyRepo), + PropertySchema: repo.PropertySchemaLoaderFrom(i.propertySchemaRepo), + Dataset: repo.DatasetLoaderFrom(i.datasetRepo), + Layer: repo.LayerLoaderBySceneFrom(i.layerRepo), + Plugin: repo.PluginLoaderFrom(i.pluginRepo), + } + + result, err := pluginMigrator.MigratePlugins(ctx, s, oldPluginID, newPluginID) + + if err := i.sceneRepo.Save(ctx, result.Scene); err != nil { + return nil, err + } + if err := i.propertyRepo.SaveAll(ctx, result.Properties); err != nil { + return nil, err + } + if err := i.layerRepo.SaveAll(ctx, result.Layers); err != nil { + return nil, err + } + if err := i.layerRepo.RemoveAll(ctx, result.RemovedLayers); err != nil { + return nil, err + } + if err := i.propertyRepo.RemoveAll(ctx, result.RemovedProperties); err != nil { + return nil, err + } + + tx.Commit() + return result.Scene, err +} diff --git a/server/internal/usecase/interactor/scene_plugin_test.go b/server/internal/usecase/interactor/scene_plugin_test.go new file mode 100644 index 000000000..65b75e310 --- /dev/null +++ b/server/internal/usecase/interactor/scene_plugin_test.go @@ -0,0 +1,390 @@ +package interactor + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/internal/infrastructure/fs" + "github.com/reearth/reearth-backend/internal/infrastructure/memory" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + 
"github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" +) + +func TestScene_InstallPlugin(t *testing.T) { + type args struct { + pluginID plugin.ID + operator *usecase.Operator + } + + type test struct { + name string + installedScenePlugins []*scene.Plugin + args args + wantErr error + } + + sid := scene.NewID() + pid := plugin.MustID("plugin~1.0.0") + pid2 := plugin.MustID("plugin~1.0.1") + pid3 := plugin.MustID("plugin~1.0.1").WithScene(&sid) + pid4 := plugin.MustID("plugin~1.0.1").WithScene(scene.NewID().Ref()) + + tests := []test{ + { + name: "should install a plugin", + args: args{ + pluginID: pid, + }, + }, + { + name: "should install a private plugin with property schema", + args: args{ + pluginID: pid3, + }, + }, + { + name: "already installed", + installedScenePlugins: []*scene.Plugin{ + scene.NewPlugin(pid, nil), + }, + args: args{ + pluginID: pid, + }, + wantErr: interfaces.ErrPluginAlreadyInstalled, + }, + { + name: "not found", + args: args{ + pluginID: pid2, + }, + wantErr: interfaces.ErrPluginNotFound, + }, + { + name: "diff scene", + args: args{ + pluginID: pid4, + }, + wantErr: interfaces.ErrPluginNotFound, + }, + { + name: "operation denied", + args: args{ + operator: &usecase.Operator{}, + }, + wantErr: interfaces.ErrOperationDenied, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert := assert.New(t) + ctx := context.Background() + + tid := id.NewTeamID() + sc := scene.New().ID(sid).RootLayer(id.NewLayerID()).Team(tid).MustBuild() + for _, p := range tt.installedScenePlugins { + sc.Plugins().Add(p) + } + sr := memory.NewSceneWith(sc) + + pl := plugin.New().ID(pid).MustBuild() + pl2 := 
plugin.New().ID(pid3).Schema(id.NewPropertySchemaID(pid3, "@").Ref()).MustBuild() + pl3 := plugin.New().ID(pid4).MustBuild() + pr := memory.NewPluginWith(pl, pl2, pl3) + + prr := memory.NewProperty() + + uc := &Scene{ + sceneRepo: sr, + pluginRepo: pr, + propertyRepo: prr, + transaction: memory.NewTransaction(), + } + + o := tt.args.operator + if o == nil { + o = &usecase.Operator{ + WritableTeams: id.TeamIDList{tid}, + } + } + gotSc, gotPrid, err := uc.InstallPlugin(ctx, sid, tt.args.pluginID, o) + + if tt.wantErr != nil { + assert.Equal(tt.wantErr, err) + assert.Nil(gotSc) + assert.True(gotPrid.IsNil()) + } else { + assert.NoError(err) + assert.Same(sc, gotSc) + if tt.args.pluginID.Equal(pl2.ID()) { + assert.False(gotPrid.IsNil()) + gotPr, _ := prr.FindByID(ctx, *gotPrid) + assert.Equal(*pl2.Schema(), gotPr.Schema()) + } else { + assert.True(gotPrid.IsNil()) + } + assert.True(gotSc.Plugins().Has(tt.args.pluginID)) + } + }) + } +} + +func TestScene_UninstallPlugin(t *testing.T) { + type args struct { + pluginID plugin.ID + operator *usecase.Operator + } + + type test struct { + name string + args args + wantErr error + } + + sid := scene.NewID() + pid := plugin.MustID("plugin~1.0.0") + pid2 := plugin.MustID("plugin~1.0.1") + pid3 := plugin.MustID("plugin~1.0.2") + pid4 := plugin.MustID("plugin2~1.0.3").WithScene(&sid) + + tests := []test{ + { + name: "should uninstall a plugin", + args: args{ + pluginID: pid, + }, + }, + { + name: "should uninstall a private plugin", + args: args{ + pluginID: pid4, + }, + }, + { + name: "not installed plugin", + args: args{ + pluginID: pid2, + }, + wantErr: interfaces.ErrPluginNotInstalled, + }, + { + name: "not found", + args: args{ + pluginID: pid3, + }, + wantErr: interfaces.ErrPluginNotFound, + }, + { + name: "operation denied", + args: args{ + operator: &usecase.Operator{}, + }, + wantErr: interfaces.ErrOperationDenied, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert := 
assert.New(t) + ctx := context.Background() + + psid := id.NewPropertySchemaID(pid, "@") + pl3ps := property.NewSchema().ID(psid).MustBuild() + psr := memory.NewPropertySchemaWith(pl3ps) + + pl1 := plugin.New().ID(pid).MustBuild() + pl2 := plugin.New().ID(pid2).MustBuild() + pl3 := plugin.New().ID(pid4).Schema(&psid).MustBuild() + pr := memory.NewPluginWith(pl1, pl2, pl3) + + ppr := property.New().NewID().Scene(sid).Schema(*pl3.Schema()).MustBuild() + ppr2 := property.New().NewID().Scene(sid).Schema(id.NewPropertySchemaID(pid, "a")).MustBuild() + prr := memory.NewPropertyWith(ppr, ppr2) + + ibf := layer.NewInfoboxField().NewID().Plugin(pid).Extension("a").Property(id.NewPropertyID()).MustBuild() + ib := layer.NewInfobox([]*layer.InfoboxField{ibf}, id.NewPropertyID()) + l1 := layer.New().NewID().Scene(sid).Infobox(ib).Item().MustBuild() + l2 := layer.New().NewID().Scene(sid).Group().Layers(layer.NewIDList([]layer.ID{l1.ID()})).MustBuild() + lr := memory.NewLayerWith(l1, l2) + + tid := id.NewTeamID() + sc := scene.New().ID(sid).RootLayer(id.NewLayerID()).Team(tid).MustBuild() + sc.Plugins().Add(scene.NewPlugin(pid, nil)) + sc.Plugins().Add(scene.NewPlugin(pid4, ppr.ID().Ref())) + sw, _ := scene.NewWidget(scene.NewWidgetID(), pid, "a", ppr2.ID(), true, false) + sc.Widgets().Add(sw) + sr := memory.NewSceneWith(sc) + + fsg, _ := fs.NewFile(afero.NewMemMapFs(), "") + + uc := &Scene{ + sceneRepo: sr, + pluginRepo: pr, + propertyRepo: prr, + layerRepo: lr, + propertySchemaRepo: psr, + file: fsg, + transaction: memory.NewTransaction(), + } + + o := tt.args.operator + if o == nil { + o = &usecase.Operator{ + WritableTeams: id.TeamIDList{tid}, + } + } + gotSc, err := uc.UninstallPlugin(ctx, sid, tt.args.pluginID, o) + + if tt.wantErr != nil { + assert.Equal(tt.wantErr, err) + assert.Nil(gotSc) + } else { + assert.NoError(err) + assert.Same(sc, gotSc) + assert.False(gotSc.Plugins().Has(tt.args.pluginID)) + + if tt.args.pluginID.Equal(pid) { + 
assert.False(sc.Widgets().Has(sw.ID())) + _, err = prr.FindByID(ctx, ppr2.ID()) + assert.Equal(rerror.ErrNotFound, err) + } + + if tt.args.pluginID.Equal(pid4) { + _, err = prr.FindByID(ctx, ppr.ID()) + assert.Equal(rerror.ErrNotFound, err) + } + + if !tt.args.pluginID.Scene().IsNil() { + if tt.args.pluginID.Equal(pid4) { + _, err := psr.FindByID(ctx, ppr.Schema()) + assert.Equal(rerror.ErrNotFound, err) + } + + _, err = pr.FindByID(ctx, tt.args.pluginID) + assert.Equal(rerror.ErrNotFound, err) + } + } + }) + } +} + +func TestScene_UpgradePlugin(t *testing.T) { + type args struct { + old plugin.ID + new plugin.ID + operator *usecase.Operator + } + + type test struct { + name string + args args + wantErr error + } + + sid := scene.NewID() + pid1 := plugin.MustID("plugin~1.0.0") + pid2 := plugin.MustID("plugin~1.0.1") + pid3 := plugin.MustID("plugin~1.0.2") + pid4 := plugin.MustID("pluginx~1.0.2") + + tests := []test{ + { + name: "should upgrade a plugin", + args: args{ + old: pid1, + new: pid2, + }, + }, + { + name: "not installed", + args: args{ + old: pid2, + new: pid3, + }, + wantErr: interfaces.ErrPluginNotInstalled, + }, + { + name: "diff names", + args: args{ + old: pid1, + new: pid4, + }, + wantErr: interfaces.ErrCannotUpgradeToPlugin, + }, + { + name: "operation denied", + args: args{ + operator: &usecase.Operator{}, + }, + wantErr: interfaces.ErrOperationDenied, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert := assert.New(t) + ctx := context.Background() + + pl1ps := property.NewSchema().ID(id.NewPropertySchemaID(pid1, "@")).MustBuild() + pl2ps := property.NewSchema().ID(id.NewPropertySchemaID(pid2, "@")).MustBuild() + psr := memory.NewPropertySchemaWith(pl1ps, pl2ps) + + pl1 := plugin.New().ID(pid1).Schema(pl1ps.ID().Ref()).MustBuild() + pl2 := plugin.New().ID(pid2).Schema(pl2ps.ID().Ref()).MustBuild() + pr := memory.NewPluginWith(pl1, pl2) + + pl1p := 
property.New().NewID().Scene(sid).Schema(*pl1.Schema()).MustBuild() + prr := memory.NewPropertyWith(pl1p) + + lr := memory.NewLayerWith() + + dsr := memory.NewDataset() + + tid := id.NewTeamID() + sc := scene.New().ID(sid).RootLayer(id.NewLayerID()).Team(tid).MustBuild() + sc.Plugins().Add(scene.NewPlugin(pid1, pl1p.ID().Ref())) + sr := memory.NewSceneWith(sc) + + uc := &Scene{ + sceneRepo: sr, + pluginRepo: pr, + propertyRepo: prr, + propertySchemaRepo: psr, + layerRepo: lr, + datasetRepo: dsr, + transaction: memory.NewTransaction(), + } + + o := tt.args.operator + if o == nil { + o = &usecase.Operator{ + WritableTeams: id.TeamIDList{tid}, + } + } + gotSc, err := uc.UpgradePlugin(ctx, sid, tt.args.old, tt.args.new, o) + + if tt.wantErr != nil { + assert.Equal(tt.wantErr, err) + assert.Nil(gotSc) + } else { + assert.NoError(err) + assert.Same(sc, gotSc) + assert.False(gotSc.Plugins().Has(tt.args.old)) + assert.True(gotSc.Plugins().Has(tt.args.new)) + p, _ := prr.FindByID(ctx, *gotSc.Plugins().Plugin(tt.args.new).Property()) + assert.Equal(*pl2.Schema(), p.Schema()) + } + }) + } +} diff --git a/server/internal/usecase/interactor/tag.go b/server/internal/usecase/interactor/tag.go new file mode 100644 index 000000000..c37aa5aeb --- /dev/null +++ b/server/internal/usecase/interactor/tag.go @@ -0,0 +1,315 @@ +package interactor + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/tag" +) + +type Tag struct { + common + tagRepo repo.Tag + layerRepo repo.Layer + sceneRepo repo.Scene + transaction repo.Transaction +} + +func NewTag(r *repo.Container) interfaces.Tag { + return &Tag{ + tagRepo: r.Tag, + layerRepo: r.Layer, + 
sceneRepo: r.Scene, + transaction: r.Transaction, + } +} + +func (i *Tag) CreateItem(ctx context.Context, inp interfaces.CreateTagItemParam, operator *usecase.Operator) (*tag.Item, *tag.Group, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + if err := i.CanWriteScene(inp.SceneID, operator); err != nil { + return nil, nil, interfaces.ErrOperationDenied + } + + var parent *tag.Group + if inp.Parent != nil { + parent, err = i.tagRepo.FindGroupByID(ctx, *inp.Parent) + if err != nil { + return nil, nil, err + } + } + + builder := tag.NewItem(). + NewID(). + Label(inp.Label). + Scene(inp.SceneID). + Parent(inp.Parent) + if inp.LinkedDatasetSchemaID != nil && inp.LinkedDatasetID != nil && inp.LinkedDatasetField != nil { + builder = builder. + LinkedDatasetFieldID(inp.LinkedDatasetField). + LinkedDatasetID(inp.LinkedDatasetID). + LinkedDatasetSchemaID(inp.LinkedDatasetSchemaID) + } + item, err := builder.Build() + if err != nil { + return nil, nil, err + } + + if parent != nil { + parent.AddTag(item.ID()) + } + + itemt := tag.Tag(item) + tags := []*tag.Tag{&itemt} + if parent != nil { + parentt := tag.Tag(parent) + tags = append(tags, &parentt) + } + if err := i.tagRepo.SaveAll(ctx, tags); err != nil { + return nil, nil, err + } + + tx.Commit() + return item, parent, nil +} + +func (i *Tag) CreateGroup(ctx context.Context, inp interfaces.CreateTagGroupParam, operator *usecase.Operator) (*tag.Group, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + if err := i.CanWriteScene(inp.SceneID, operator); err != nil { + return nil, interfaces.ErrOperationDenied + } + + group, err := tag.NewGroup(). + NewID(). + Label(inp.Label). + Scene(inp.SceneID). + Tags(inp.Tags). 
+ Build() + + if err != nil { + return nil, err + } + + err = i.tagRepo.Save(ctx, group) + if err != nil { + return nil, err + } + tx.Commit() + return group, nil +} + +func (i *Tag) Fetch(ctx context.Context, ids []id.TagID, operator *usecase.Operator) ([]*tag.Tag, error) { + return i.tagRepo.FindByIDs(ctx, ids) +} + +func (i *Tag) FetchByScene(ctx context.Context, sid id.SceneID, operator *usecase.Operator) ([]*tag.Tag, error) { + return i.tagRepo.FindRootsByScene(ctx, sid) +} + +func (i *Tag) FetchItem(ctx context.Context, ids []id.TagID, operator *usecase.Operator) ([]*tag.Item, error) { + return i.tagRepo.FindItemByIDs(ctx, ids) +} + +func (i *Tag) FetchGroup(ctx context.Context, ids []id.TagID, operator *usecase.Operator) ([]*tag.Group, error) { + return i.tagRepo.FindGroupByIDs(ctx, ids) +} + +func (i *Tag) AttachItemToGroup(ctx context.Context, inp interfaces.AttachItemToGroupParam, operator *usecase.Operator) (*tag.Group, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + // make sure item exist + ti, err := i.tagRepo.FindItemByID(ctx, inp.ItemID) + if err != nil { + return nil, err + } + if err := i.CanWriteScene(ti.Scene(), operator); err != nil { + return nil, err + } + if ti.Parent() != nil { + return nil, errors.New("tag is already added to the group") + } + + tg, err := i.tagRepo.FindGroupByID(ctx, inp.GroupID) + if err != nil { + return nil, err + } + + if tg.Tags().Has(inp.ItemID) { + return nil, errors.New("tag item is already attached to the group") + } + + tg.Tags().Add(inp.ItemID) + ti.SetParent(tg.ID().Ref()) + + tgt := tag.Tag(tg) + tit := tag.Tag(ti) + if err := i.tagRepo.SaveAll(ctx, []*tag.Tag{&tgt, &tit}); err != nil { + return nil, err + } + + tx.Commit() + return tg, nil +} + +func (i *Tag) DetachItemFromGroup(ctx context.Context, inp interfaces.DetachItemToGroupParam, operator *usecase.Operator) 
(*tag.Group, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + // make sure item exist + ti, err := i.tagRepo.FindItemByID(ctx, inp.ItemID) + if err != nil { + return nil, err + } + if err := i.CanWriteScene(ti.Scene(), operator); err != nil { + return nil, err + } + + tg, err := i.tagRepo.FindGroupByID(ctx, inp.GroupID) + if err != nil { + return nil, err + } + + if !tg.Tags().Has(inp.ItemID) { + return nil, errors.New("tag item is not attached to the group") + } + + tg.RemoveTag(inp.ItemID) + ti.SetParent(nil) + + tgt := tag.Tag(tg) + tit := tag.Tag(ti) + if err := i.tagRepo.SaveAll(ctx, []*tag.Tag{&tgt, &tit}); err != nil { + return nil, err + } + + tx.Commit() + return tg, nil +} + +func (i *Tag) UpdateTag(ctx context.Context, inp interfaces.UpdateTagParam, operator *usecase.Operator) (*tag.Tag, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + tg, err := i.tagRepo.FindByID(ctx, inp.TagID) + if err != nil { + return nil, err + } + if err := i.CanWriteScene(tg.Scene(), operator); err != nil { + return nil, err + } + + if inp.Label != nil { + tg.Rename(*inp.Label) + } + + err = i.tagRepo.Save(ctx, tg) + if err != nil { + return nil, err + } + tx.Commit() + return &tg, nil +} + +func (i *Tag) Remove(ctx context.Context, tagID id.TagID, operator *usecase.Operator) (*id.TagID, layer.List, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + t, err := i.tagRepo.FindByID(ctx, tagID) + if err != nil { + return nil, nil, err + } + if err := i.CanWriteScene(t.Scene(), operator); err != nil { + return nil, nil, err + } + + if group := tag.ToTagGroup(t); group != nil { + if 
len(group.Tags()) != 0 { + return nil, nil, interfaces.ErrNonemptyTagGroupCannotDelete + } + } + + if item := tag.ToTagItem(t); item != nil { + g, err := i.tagRepo.FindGroupByItem(ctx, item.ID()) + if err != nil && !errors.Is(rerror.ErrNotFound, err) { + return nil, nil, err + } + if g != nil { + g.RemoveTag(item.ID()) + if err := i.tagRepo.Save(ctx, g); err != nil { + return nil, nil, err + } + } + } + + ls, err := i.layerRepo.FindByTag(ctx, tagID) + if err != nil && !errors.Is(rerror.ErrNotFound, err) { + return nil, nil, err + } + + if len(ls) != 0 { + for _, l := range ls.Deref() { + _ = l.Tags().Delete(tagID) + } + if err := i.layerRepo.SaveAll(ctx, ls); err != nil { + return nil, nil, err + } + } + + if err := i.tagRepo.Remove(ctx, tagID); err != nil { + return nil, nil, err + } + + return &tagID, ls, nil +} diff --git a/server/internal/usecase/interactor/team.go b/server/internal/usecase/interactor/team.go new file mode 100644 index 000000000..d0030521d --- /dev/null +++ b/server/internal/usecase/interactor/team.go @@ -0,0 +1,306 @@ +package interactor + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" +) + +type Team struct { + common + teamRepo repo.Team + projectRepo repo.Project + userRepo repo.User + transaction repo.Transaction +} + +func NewTeam(r *repo.Container) interfaces.Team { + return &Team{ + teamRepo: r.Team, + projectRepo: r.Project, + userRepo: r.User, + transaction: r.Transaction, + } +} + +func (i *Team) Fetch(ctx context.Context, ids []id.TeamID, operator *usecase.Operator) ([]*user.Team, error) { + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + res, err := i.teamRepo.FindByIDs(ctx, ids) + res2, err := i.filterTeams(res, operator, err) + return res2, err +} + +func 
(i *Team) FindByUser(ctx context.Context, id id.UserID, operator *usecase.Operator) ([]*user.Team, error) { + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + res, err := i.teamRepo.FindByUser(ctx, id) + res2, err := i.filterTeams(res, operator, err) + return res2, err +} + +func (i *Team) Create(ctx context.Context, name string, firstUser id.UserID, operator *usecase.Operator) (_ *user.Team, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + team, err := user.NewTeam(). + NewID(). + Name(name). + Build() + if err != nil { + return nil, err + } + + if err := team.Members().Join(firstUser, user.RoleOwner); err != nil { + return nil, err + } + + if err := i.teamRepo.Save(ctx, team); err != nil { + return nil, err + } + + operator.AddNewTeam(team.ID()) + tx.Commit() + return team, nil +} + +func (i *Team) Update(ctx context.Context, id id.TeamID, name string, operator *usecase.Operator) (_ *user.Team, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + if operator == nil { + return nil, interfaces.ErrOperationDenied + } + + team, err := i.teamRepo.FindByID(ctx, id) + if err != nil { + return nil, err + } + if team.IsPersonal() { + return nil, user.ErrCannotModifyPersonalTeam + } + if team.Members().GetRole(operator.User) != user.RoleOwner { + return nil, interfaces.ErrOperationDenied + } + + team.Rename(name) + + err = i.teamRepo.Save(ctx, team) + if err != nil { + return nil, err + } + + tx.Commit() + return team, nil +} + +func (i *Team) AddMember(ctx context.Context, id id.TeamID, u id.UserID, role user.Role, operator *usecase.Operator) (_ *user.Team, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != 
nil { + err = err2 + } + }() + + if operator == nil { + return nil, interfaces.ErrOperationDenied + } + + team, err := i.teamRepo.FindByID(ctx, id) + if err != nil { + return nil, err + } + if team.IsPersonal() { + return nil, user.ErrCannotModifyPersonalTeam + } + if team.Members().GetRole(operator.User) != user.RoleOwner { + return nil, interfaces.ErrOperationDenied + } + + _, err = i.userRepo.FindByID(ctx, u) + if err != nil { + return nil, err + } + + err = team.Members().Join(u, role) + if err != nil { + return nil, err + } + + err = i.teamRepo.Save(ctx, team) + if err != nil { + return nil, err + } + + tx.Commit() + return team, nil +} + +func (i *Team) RemoveMember(ctx context.Context, id id.TeamID, u id.UserID, operator *usecase.Operator) (_ *user.Team, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + if operator == nil { + return nil, interfaces.ErrOperationDenied + } + + team, err := i.teamRepo.FindByID(ctx, id) + if err != nil { + return nil, err + } + if team.IsPersonal() { + return nil, user.ErrCannotModifyPersonalTeam + } + if team.Members().GetRole(operator.User) != user.RoleOwner { + return nil, interfaces.ErrOperationDenied + } + + if u == operator.User { + return nil, interfaces.ErrOwnerCannotLeaveTheTeam + } + + err = team.Members().Leave(u) + if err != nil { + return nil, err + } + + err = i.teamRepo.Save(ctx, team) + if err != nil { + return nil, err + } + + tx.Commit() + return team, nil +} + +func (i *Team) UpdateMember(ctx context.Context, id id.TeamID, u id.UserID, role user.Role, operator *usecase.Operator) (_ *user.Team, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + if operator == nil { + return nil, interfaces.ErrOperationDenied + } + + team, err := i.teamRepo.FindByID(ctx, id) + if 
err != nil { + return nil, err + } + if team.IsPersonal() { + return nil, user.ErrCannotModifyPersonalTeam + } + if team.Members().GetRole(operator.User) != user.RoleOwner { + return nil, interfaces.ErrOperationDenied + } + + if u == operator.User { + return nil, interfaces.ErrCannotChangeOwnerRole + } + + err = team.Members().UpdateRole(u, role) + if err != nil { + return nil, err + } + + err = i.teamRepo.Save(ctx, team) + if err != nil { + return nil, err + } + + tx.Commit() + return team, nil +} + +func (i *Team) Remove(ctx context.Context, id id.TeamID, operator *usecase.Operator) (err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + if operator == nil { + return interfaces.ErrOperationDenied + } + + team, err := i.teamRepo.FindByID(ctx, id) + if err != nil { + return err + } + if team.IsPersonal() { + return user.ErrCannotModifyPersonalTeam + } + if team.Members().GetRole(operator.User) != user.RoleOwner { + return interfaces.ErrOperationDenied + } + + projects, err := i.projectRepo.CountByTeam(ctx, id) + if err != nil { + return err + } + if projects > 0 { + return interfaces.ErrCannotDeleteTeam + } + + err = i.teamRepo.Remove(ctx, id) + if err != nil { + return err + } + + tx.Commit() + return +} + +func (i *Team) filterTeams(teams []*user.Team, operator *usecase.Operator, err error) ([]*user.Team, error) { + if err != nil { + return nil, err + } + if operator == nil { + return make([]*user.Team, len(teams)), nil + } + for i, t := range teams { + if t == nil || !operator.IsReadableTeam(t.ID()) { + teams[i] = nil + } + } + return teams, nil +} diff --git a/server/internal/usecase/interactor/team_test.go b/server/internal/usecase/interactor/team_test.go new file mode 100644 index 000000000..7a046a236 --- /dev/null +++ b/server/internal/usecase/interactor/team_test.go @@ -0,0 +1,36 @@ +package interactor + +import ( + "context" + "testing" 
+ + "github.com/reearth/reearth-backend/internal/infrastructure/memory" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" + "github.com/stretchr/testify/assert" +) + +func TestCreateTeam(t *testing.T) { + ctx := context.Background() + + db := memory.New() + + u := user.New().NewID().Email("aaa@bbb.com").Team(id.NewTeamID()).MustBuild() + teamUC := NewTeam(db) + op := &usecase.Operator{User: u.ID()} + team, err := teamUC.Create(ctx, "team name", u.ID(), op) + + assert.Nil(t, err) + assert.NotNil(t, team) + + resultTeams, _ := teamUC.Fetch(ctx, []id.TeamID{team.ID()}, &usecase.Operator{ + ReadableTeams: []id.TeamID{team.ID()}, + }) + + assert.NotNil(t, resultTeams) + assert.NotEmpty(t, resultTeams) + assert.Equal(t, resultTeams[0].ID(), team.ID()) + assert.Equal(t, resultTeams[0].Name(), "team name") + assert.Equal(t, user.TeamIDList{resultTeams[0].ID()}, op.OwningTeams) +} diff --git a/server/internal/usecase/interactor/usecase.go b/server/internal/usecase/interactor/usecase.go new file mode 100644 index 000000000..bd3c5f4d2 --- /dev/null +++ b/server/internal/usecase/interactor/usecase.go @@ -0,0 +1,121 @@ +package interactor + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" +) + +type uc struct { + tx bool + readableTeams id.TeamIDList + writableTeams id.TeamIDList + readableScenes id.SceneIDList + writableScenes id.SceneIDList +} + +func Usecase() *uc { + return &uc{} +} + +func (u *uc) WithReadableTeams(ids ...id.TeamID) *uc { + u.readableTeams = id.TeamIDList(ids).Clone() + return u +} + +func (u *uc) WithWritableTeams(ids ...id.TeamID) *uc { + u.writableTeams = id.TeamIDList(ids).Clone() + return u +} + +func (u *uc) WithReadablScenes(ids ...id.SceneID) *uc { 
+ u.readableScenes = id.SceneIDList(ids).Clone() + return u +} + +func (u *uc) WithWritableScenes(ids ...id.SceneID) *uc { + u.writableScenes = id.SceneIDList(ids).Clone() + return u +} + +func (u *uc) Transaction() *uc { + u.tx = true + return u +} + +func Run0(ctx context.Context, op *usecase.Operator, r *repo.Container, e *uc, f func() error) (err error) { + _, _, _, err = Run3( + ctx, op, r, e, + func() (_, _, _ any, err error) { + err = f() + return + }) + return +} + +func Run1[A any](ctx context.Context, op *usecase.Operator, r *repo.Container, e *uc, f func() (A, error)) (a A, err error) { + a, _, _, err = Run3( + ctx, op, r, e, + func() (a A, _, _ any, err error) { + a, err = f() + return + }) + return +} + +func Run2[A, B any](ctx context.Context, op *usecase.Operator, r *repo.Container, e *uc, f func() (A, B, error)) (a A, b B, err error) { + a, b, _, err = Run3( + ctx, op, r, e, + func() (a A, b B, _ any, err error) { + a, b, err = f() + return + }) + return +} + +func Run3[A, B, C any](ctx context.Context, op *usecase.Operator, r *repo.Container, e *uc, f func() (A, B, C, error)) (_ A, _ B, _ C, err error) { + if err = e.checkPermission(op); err != nil { + return + } + + if e.tx && r.Transaction != nil { + tx, err2 := r.Transaction.Begin() + if err2 != nil { + err = err2 + return + } + defer func() { + if err == nil { + tx.Commit() + } + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + } + + return f() +} + +func (u *uc) checkPermission(op *usecase.Operator) error { + ok := true + if u.readableTeams != nil { + ok = op.IsReadableTeam(u.readableTeams...) + } + if ok && u.writableTeams != nil { + ok = op.IsWritableTeam(u.writableTeams...) + } + if ok && u.readableScenes != nil { + ok = op.IsReadableScene(u.readableScenes...) + } + if ok && u.writableScenes != nil { + ok = op.IsWritableScene(u.writableScenes...) 
+ } + if !ok { + return interfaces.ErrOperationDenied + } + return nil +} diff --git a/server/internal/usecase/interactor/usecase_test.go b/server/internal/usecase/interactor/usecase_test.go new file mode 100644 index 000000000..be271148e --- /dev/null +++ b/server/internal/usecase/interactor/usecase_test.go @@ -0,0 +1,202 @@ +package interactor + +import ( + "context" + "errors" + "testing" + + "github.com/reearth/reearth-backend/internal/infrastructure/memory" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestUc_checkPermission(t *testing.T) { + tid := id.NewTeamID() + sid := id.NewSceneID() + + tests := []struct { + name string + op *usecase.Operator + readableTeams id.TeamIDList + writableTeams id.TeamIDList + readableScenes id.SceneIDList + writableScenes id.SceneIDList + wantErr bool + }{ + { + name: "nil operator", + wantErr: false, + }, + { + name: "nil operator 2", + readableTeams: id.TeamIDList{id.NewTeamID()}, + wantErr: false, + }, + { + name: "can read a team", + readableTeams: id.TeamIDList{tid}, + op: &usecase.Operator{ + ReadableTeams: id.TeamIDList{tid}, + }, + wantErr: true, + }, + { + name: "cannot read a team", + readableTeams: id.TeamIDList{id.NewTeamID()}, + op: &usecase.Operator{ + ReadableTeams: id.TeamIDList{}, + }, + wantErr: true, + }, + { + name: "can write a team", + writableTeams: id.TeamIDList{tid}, + op: &usecase.Operator{ + WritableTeams: id.TeamIDList{tid}, + }, + wantErr: true, + }, + { + name: "cannot write a team", + writableTeams: id.TeamIDList{tid}, + op: &usecase.Operator{ + WritableTeams: id.TeamIDList{}, + }, + wantErr: true, + }, + { + name: "can read a scene", + readableScenes: id.SceneIDList{sid}, + op: &usecase.Operator{ + ReadableScenes: id.SceneIDList{sid}, + }, + wantErr: true, + }, + { 
+			name:           "cannot read a scene",
+			readableScenes: id.SceneIDList{sid},
+			op: &usecase.Operator{
+				ReadableScenes: id.SceneIDList{},
+			},
+			wantErr: true,
+		},
+		{
+			name:           "can write a scene",
+			writableScenes: id.SceneIDList{sid},
+			op: &usecase.Operator{
+				WritableScenes: id.SceneIDList{sid},
+			},
+			wantErr: true,
+		},
+		{
+			name:           "cannot write a scene",
+			writableScenes: id.SceneIDList{sid},
+			op: &usecase.Operator{
+				WritableScenes: id.SceneIDList{},
+			},
+			wantErr: true,
+		},
+	}
+
+	for _, tt := range tests {
+		// Capture the range variable: t.Parallel() returns control to this
+		// loop before the subtest body runs, so without this shadow every
+		// parallel subtest would observe the final element of tests.
+		tt := tt
+		t.Run(tt.name, func(t *testing.T) {
+			t.Parallel()
+
+			e := &uc{
+				readableTeams:  tt.readableTeams,
+				writableTeams:  tt.writableTeams,
+				readableScenes: tt.readableScenes,
+				writableScenes: tt.writableScenes,
+			}
+			got := e.checkPermission(tt.op)
+			if tt.wantErr {
+				assert.Equal(t, interfaces.ErrOperationDenied, got)
+			} else {
+				assert.Nil(t, got)
+			}
+		})
+	}
+}
+
+// TestUc checks that each uc builder method sets exactly its own field.
+func TestUc(t *testing.T) {
+	teams := id.TeamIDList{id.NewTeamID(), id.NewTeamID(), id.NewTeamID()}
+	scenes := id.SceneIDList{id.NewSceneID(), id.NewSceneID(), id.NewSceneID()}
+	assert.Equal(t, &uc{}, Usecase())
+	assert.Equal(t, &uc{readableTeams: teams}, (&uc{}).WithReadableTeams(teams...))
+	assert.Equal(t, &uc{writableTeams: teams}, (&uc{}).WithWritableTeams(teams...))
+	assert.Equal(t, &uc{readableScenes: scenes}, (&uc{}).WithReadablScenes(scenes...))
+	assert.Equal(t, &uc{writableScenes: scenes}, (&uc{}).WithWritableScenes(scenes...))
+	assert.Equal(t, &uc{tx: true}, (&uc{}).Transaction())
+}
+
+// TestRun exercises Run0/Run3 with and without a transaction, and the three
+// failure modes (usecase error, Begin error, End error).
+func TestRun(t *testing.T) {
+	ctx := context.Background()
+	err := errors.New("test")
+	a, b, c := &struct{}{}, &struct{}{}, &struct{}{}
+	tr := memory.NewTransaction()
+	r := &repo.Container{Transaction: tr}
+
+	// regular1: without tx
+	gota, gotb, gotc, goterr := Run3(
+		ctx, nil, r,
+		Usecase(),
+		func() (any, any, any, error) {
+			return a, b, c, nil
+		},
+	)
+	assert.Same(t, a, gota)
+	assert.Same(t, b, gotb)
+	assert.Same(t, c, gotc)
+	assert.Nil(t, goterr)
+	assert.Equal(t, 0,
tr.Committed()) // not committed + + // regular2: with tx + _ = Run0( + ctx, nil, r, + Usecase().Transaction(), + func() error { + return nil + }, + ) + assert.Equal(t, 1, tr.Committed()) // committed + + // iregular1: the usecase returns an error + goterr = Run0( + ctx, nil, r, + Usecase().Transaction(), + func() error { + return err + }, + ) + assert.Same(t, err, goterr) + assert.Equal(t, 1, tr.Committed()) // not committed + + // iregular2: tx.Begin returns an error + tr.SetBeginError(err) + tr.SetEndError(nil) + goterr = Run0( + ctx, nil, r, + Usecase().Transaction(), + func() error { + return nil + }, + ) + assert.Same(t, err, goterr) + assert.Equal(t, 1, tr.Committed()) // not committed + + // iregular3: tx.End returns an error + tr.SetBeginError(nil) + tr.SetEndError(err) + goterr = Run0( + ctx, nil, r, + Usecase().Transaction(), + func() error { + return nil + }, + ) + assert.Same(t, err, goterr) + assert.Equal(t, 1, tr.Committed()) // fails +} diff --git a/server/internal/usecase/interactor/user.go b/server/internal/usecase/interactor/user.go new file mode 100644 index 000000000..a73ec7c0a --- /dev/null +++ b/server/internal/usecase/interactor/user.go @@ -0,0 +1,503 @@ +package interactor + +import ( + "bytes" + "context" + _ "embed" + "errors" + htmlTmpl "html/template" + "net/mail" + textTmpl "text/template" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/project" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/user" +) + +type User struct { + common + userRepo repo.User + teamRepo repo.Team + projectRepo repo.Project + sceneRepo repo.Scene + sceneLockRepo repo.SceneLock + 
layerRepo repo.Layer + propertyRepo repo.Property + datasetRepo repo.Dataset + datasetSchemaRepo repo.DatasetSchema + transaction repo.Transaction + file gateway.File + authenticator gateway.Authenticator + mailer gateway.Mailer + signupSecret string + authSrvUIDomain string +} + +type mailContent struct { + UserName string + Message string + Suffix string + ActionLabel string + ActionURL htmlTmpl.URL +} + +var ( + //go:embed emails/auth_html.tmpl + autHTMLTMPLStr string + //go:embed emails/auth_text.tmpl + authTextTMPLStr string + + authTextTMPL *textTmpl.Template + authHTMLTMPL *htmlTmpl.Template + + passwordResetMailContent mailContent +) + +func init() { + var err error + authTextTMPL, err = textTmpl.New("passwordReset").Parse(authTextTMPLStr) + if err != nil { + log.Panicf("password reset email template parse error: %s\n", err) + } + authHTMLTMPL, err = htmlTmpl.New("passwordReset").Parse(autHTMLTMPLStr) + if err != nil { + log.Panicf("password reset email template parse error: %s\n", err) + } + + passwordResetMailContent = mailContent{ + Message: "Thank you for using Re:Earth. Weโ€™ve received a request to reset your password. 
If this was you, please click the link below to confirm and change your password.", + Suffix: "If you did not mean to reset your password, then you can ignore this email.", + ActionLabel: "Confirm to reset your password", + } +} + +func NewUser(r *repo.Container, g *gateway.Container, signupSecret, authSrcUIDomain string) interfaces.User { + return &User{ + userRepo: r.User, + teamRepo: r.Team, + projectRepo: r.Project, + sceneRepo: r.Scene, + sceneLockRepo: r.SceneLock, + layerRepo: r.Layer, + propertyRepo: r.Property, + datasetRepo: r.Dataset, + datasetSchemaRepo: r.DatasetSchema, + transaction: r.Transaction, + file: g.File, + authenticator: g.Authenticator, + signupSecret: signupSecret, + authSrvUIDomain: authSrcUIDomain, + mailer: g.Mailer, + } +} + +func (i *User) Fetch(ctx context.Context, ids []id.UserID, operator *usecase.Operator) ([]*user.User, error) { + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + res, err := i.userRepo.FindByIDs(ctx, ids) + if err != nil { + return res, err + } + // filter + for k, u := range res { + teams, err := i.teamRepo.FindByUser(ctx, u.ID()) + if err != nil { + return res, err + } + teamIDs := make([]id.TeamID, 0, len(teams)) + for _, t := range teams { + if t != nil { + teamIDs = append(teamIDs, t.ID()) + } + } + if !operator.IsReadableTeam(teamIDs...) 
{
+			res[k] = nil
+		}
+	}
+	return res, nil
+}
+
+// GetUserByCredentials authenticates a user by email (or name) and password.
+func (i *User) GetUserByCredentials(ctx context.Context, inp interfaces.GetUserByCredentials) (u *user.User, err error) {
+	u, err = i.userRepo.FindByNameOrEmail(ctx, inp.Email)
+	// errors.Is takes (err, target); the original call had the arguments
+	// swapped, so a wrapped not-found error escaped to the caller instead of
+	// being reported as invalid credentials.
+	if err != nil && !errors.Is(err, rerror.ErrNotFound) {
+		return nil, err
+	} else if u == nil {
+		return nil, interfaces.ErrInvalidUserEmail
+	}
+	matched, err := u.MatchPassword(inp.Password)
+	if err != nil {
+		return nil, err
+	}
+	if !matched {
+		return nil, interfaces.ErrSignupInvalidPassword
+	}
+	if u.Verification() == nil || !u.Verification().IsVerified() {
+		return nil, interfaces.ErrNotVerifiedUser
+	}
+	return u, nil
+}
+
+// GetUserBySubject returns the user owning the given auth provider subject.
+func (i *User) GetUserBySubject(ctx context.Context, sub string) (u *user.User, err error) {
+	u, err = i.userRepo.FindByAuth0Sub(ctx, sub)
+	if err != nil {
+		return nil, err
+	}
+	return u, nil
+}
+
+// StartPasswordReset issues a password-reset token for the given email and
+// mails the reset link. The error result is named so the deferred tx.End can
+// report its error: with the original unnamed result, the deferred
+// "err = err2" assignment was silently discarded.
+func (i *User) StartPasswordReset(ctx context.Context, email string) (err error) {
+	tx, err := i.transaction.Begin()
+	if err != nil {
+		return err
+	}
+	defer func() {
+		if err2 := tx.End(ctx); err == nil && err2 != nil {
+			err = err2
+		}
+	}()
+
+	u, err := i.userRepo.FindByEmail(ctx, email)
+	if err != nil {
+		return err
+	}
+
+	pr := user.NewPasswordReset()
+	u.SetPasswordReset(pr)
+
+	if err := i.userRepo.Save(ctx, u); err != nil {
+		return err
+	}
+
+	var TextOut, HTMLOut bytes.Buffer
+	link := i.authSrvUIDomain + "/?pwd-reset-token=" + pr.Token
+	// Fill a local copy: mutating the package-level passwordResetMailContent
+	// would be a data race when two password resets run concurrently.
+	content := passwordResetMailContent
+	content.UserName = u.Name()
+	content.ActionURL = htmlTmpl.URL(link)
+
+	if err := authTextTMPL.Execute(&TextOut, content); err != nil {
+		return err
+	}
+	if err := authHTMLTMPL.Execute(&HTMLOut, content); err != nil {
+		return err
+	}
+
+	err = i.mailer.SendMail([]gateway.Contact{
+		{
+			Email: u.Email(),
+			Name:  u.Name(),
+		},
+	}, "Password reset", TextOut.String(), HTMLOut.String())
+	if err != nil {
+		return err
+	}
+
+	tx.Commit()
+	return nil
+}
+
+// PasswordReset validates the reset token and stores the new password.
+// err is named so the deferred tx.End error is actually returned.
+func (i *User) PasswordReset(ctx context.Context, password, token string) (err error) {
+	tx, err := i.transaction.Begin()
+	if err != nil {
+		return err
+	}
+	defer func() {
+		if err2 := tx.End(ctx); err == nil && err2 != nil {
+			err = err2
+		}
+	}()
+
+	u, err := i.userRepo.FindByPasswordResetRequest(ctx, token)
+	if err != nil {
+		return err
+	}
+
+	passwordReset := u.PasswordReset()
+	ok := passwordReset.Validate(token)
+
+	if !ok {
+		return interfaces.ErrUserInvalidPasswordReset
+	}
+
+	u.SetPasswordReset(nil)
+
+	if err := u.SetPassword(password); err != nil {
+		return err
+	}
+
+	if err := i.userRepo.Save(ctx, u); err != nil {
+		return err
+	}
+
+	tx.Commit()
+	return nil
+}
+
+// UpdateMe updates the calling user's profile (name, email, lang, theme,
+// password) and renames their personal team when it mirrored the old name.
+func (i *User) UpdateMe(ctx context.Context, p interfaces.UpdateMeParam, operator *usecase.Operator) (u *user.User, err error) {
+	if err := i.OnlyOperator(operator); err != nil {
+		return nil, err
+	}
+
+	if p.Password != nil {
+		if p.PasswordConfirmation == nil || *p.Password != *p.PasswordConfirmation {
+			return nil, interfaces.ErrUserInvalidPasswordConfirmation
+		}
+	}
+
+	tx, err := i.transaction.Begin()
+	if err != nil {
+		return
+	}
+	defer func() {
+		if err2 := tx.End(ctx); err == nil && err2 != nil {
+			err = err2
+		}
+	}()
+
+	var team *user.Team
+
+	u, err = i.userRepo.FindByID(ctx, operator.User)
+	if err != nil {
+		return nil, err
+	}
+
+	if p.Name != nil && *p.Name != u.Name() {
+		// username should not be a valid mail
+		if _, err := mail.ParseAddress(*p.Name); err == nil {
+			return nil, interfaces.ErrSignupInvalidName
+		}
+		// make sure the username is not exists
+		if userByName, _ := i.userRepo.FindByName(ctx, *p.Name); userByName != nil {
+			return nil, interfaces.ErrSignupInvalidName
+		}
+		oldName := u.Name()
+		u.UpdateName(*p.Name)
+
+		team, err = i.teamRepo.FindByID(ctx, u.Team())
+		if err != nil && !errors.Is(err, rerror.ErrNotFound) {
+			return nil, err
+		}
+
+		// team is nil when the lookup returned ErrNotFound; the original
+		// dereferenced it unconditionally and panicked in that case.
+		if team != nil {
+			tn := team.Name()
+			if tn == "" || tn == oldName {
+				team.Rename(*p.Name)
+			} else {
+				team = nil
+			}
+		}
+	}
+	if p.Email != nil {
+		if err :=
u.UpdateEmail(*p.Email); err != nil { + return nil, err + } + } + if p.Lang != nil { + u.UpdateLang(*p.Lang) + } + if p.Theme != nil { + u.UpdateTheme(*p.Theme) + } + + if p.Password != nil && u.HasAuthProvider("reearth") { + if err := u.SetPassword(*p.Password); err != nil { + return nil, err + } + } + + // Update Auth0 users + if p.Name != nil || p.Email != nil || p.Password != nil { + for _, a := range u.Auths() { + if a.Provider != "auth0" { + continue + } + if _, err := i.authenticator.UpdateUser(gateway.AuthenticatorUpdateUserParam{ + ID: a.Sub, + Name: p.Name, + Email: p.Email, + Password: p.Password, + }); err != nil { + return nil, err + } + } + } + + if team != nil { + err = i.teamRepo.Save(ctx, team) + if err != nil { + return nil, err + } + } + + err = i.userRepo.Save(ctx, u) + if err != nil { + return nil, err + } + + tx.Commit() + return u, nil +} + +func (i *User) RemoveMyAuth(ctx context.Context, authProvider string, operator *usecase.Operator) (u *user.User, err error) { + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + u, err = i.userRepo.FindByID(ctx, operator.User) + if err != nil { + return nil, err + } + + u.RemoveAuthByProvider(authProvider) + + err = i.userRepo.Save(ctx, u) + if err != nil { + return nil, err + } + + tx.Commit() + return u, nil +} + +func (i *User) SearchUser(ctx context.Context, nameOrEmail string, operator *usecase.Operator) (u *user.User, err error) { + u, err = i.userRepo.FindByNameOrEmail(ctx, nameOrEmail) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return nil, err + } + return u, nil +} + +func (i *User) DeleteMe(ctx context.Context, userID id.UserID, operator *usecase.Operator) (err error) { + if operator == nil || operator.User.IsNil() { + return nil + } + + if userID.IsNil() || userID != operator.User { + return 
errors.New("invalid user id")
+	}
+
+	tx, err := i.transaction.Begin()
+	if err != nil {
+		return
+	}
+	defer func() {
+		if err2 := tx.End(ctx); err == nil && err2 != nil {
+			err = err2
+		}
+	}()
+
+	u, err := i.userRepo.FindByID(ctx, userID)
+	if err != nil && !errors.Is(err, rerror.ErrNotFound) {
+		return err
+	}
+	if u == nil {
+		return nil
+	}
+
+	teams, err := i.teamRepo.FindByUser(ctx, u.ID())
+	if err != nil {
+		return err
+	}
+
+	deleter := ProjectDeleter{
+		SceneDeleter: SceneDeleter{
+			Scene:         i.sceneRepo,
+			SceneLock:     i.sceneLockRepo,
+			Layer:         i.layerRepo,
+			Property:      i.propertyRepo,
+			Dataset:       i.datasetRepo,
+			DatasetSchema: i.datasetSchemaRepo,
+		},
+		File:    i.file,
+		Project: i.projectRepo,
+	}
+	updatedTeams := make([]*user.Team, 0, len(teams))
+	deletedTeams := []id.TeamID{}
+
+	for _, team := range teams {
+		if !team.IsPersonal() && !team.Members().IsOnlyOwner(u.ID()) {
+			_ = team.Members().Leave(u.ID())
+			updatedTeams = append(updatedTeams, team)
+			continue
+		}
+
+		// Delete all projects
+		err := repo.IterateProjectsByTeam(i.projectRepo, ctx, team.ID(), 50, func(projects []*project.Project) error {
+			for _, prj := range projects {
+				if err := deleter.Delete(ctx, prj, true, operator); err != nil {
+					return err
+				}
+			}
+			return nil
+		})
+		if err != nil {
+			return err
+		}
+
+		deletedTeams = append(deletedTeams, team.ID())
+	}
+
+	// Save teams
+	if err := i.teamRepo.SaveAll(ctx, updatedTeams); err != nil {
+		return err
+	}
+
+	// Delete teams
+	if err := i.teamRepo.RemoveAll(ctx, deletedTeams); err != nil {
+		return err
+	}
+
+	// Delete user
+	if err := i.userRepo.Remove(ctx, u.ID()); err != nil {
+		return err
+	}
+
+	tx.Commit()
+	return nil
+}
+
+// CreateVerification issues a fresh email-verification code for the user
+// registered under the given email and sends the verification mail.
+// It is a no-op when the user is already verified.
+func (i *User) CreateVerification(ctx context.Context, email string) error {
+	u, err := i.userRepo.FindByEmail(ctx, email)
+	if err != nil {
+		return err
+	}
+	// Guard against a nil Verification: users can exist without one (Signup
+	// explicitly checks Verification() == nil), and the original call
+	// dereferenced it unconditionally and panicked for such users.
+	if v := u.Verification(); v != nil && v.IsVerified() {
+		return nil
+	}
+	return i.createVerification(ctx, u)
+}
+
+// VerifyUser marks the user owning the given verification code as verified.
+// Results are named so the deferred tx.End can roll back on failure and
+// surface its error; the original never ended the transaction at all,
+// leaking it on every early return.
+func (i *User) VerifyUser(ctx context.Context, code string) (u *user.User, err error) {
+	tx, err := i.transaction.Begin()
+	if err != nil {
+		return nil, err
+	}
+	defer func() {
+		if err2 := tx.End(ctx); err == nil && err2 != nil {
+			err = err2
+		}
+	}()
+
+	u, err = i.userRepo.FindByVerification(ctx, code)
+	if err != nil {
+		return nil, err
+	}
+	if u.Verification().IsExpired() {
+		return nil, errors.New("verification expired")
+	}
+	u.Verification().SetVerified(true)
+	if err = i.userRepo.Save(ctx, u); err != nil {
+		return nil, err
+	}
+
+	tx.Commit()
+	return u, nil
+}
diff --git a/server/internal/usecase/interactor/user_signup.go b/server/internal/usecase/interactor/user_signup.go
new file mode 100644
index 000000000..daedd3998
--- /dev/null
+++ b/server/internal/usecase/interactor/user_signup.go
@@ -0,0 +1,391 @@
+package interactor
+
+import (
+	"bytes"
+	"context"
+	"encoding/json"
+	"errors"
+	"fmt"
+	htmlTmpl "html/template"
+	"net/http"
+	"net/url"
+	"path"
+	"strings"
+
+	"github.com/reearth/reearth-backend/internal/usecase/gateway"
+	"github.com/reearth/reearth-backend/internal/usecase/interfaces"
+	"github.com/reearth/reearth-backend/pkg/id"
+	"github.com/reearth/reearth-backend/pkg/rerror"
+	"github.com/reearth/reearth-backend/pkg/user"
+)
+
+// Signup registers a new user and their team. Results are named (matching
+// SignupOIDC) so the deferred tx.End is effective: with unnamed results its
+// "err = err2" assignment was silently discarded.
+func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user.User, _ *user.Team, err error) {
+	if inp.Name == "" {
+		return nil, nil, interfaces.ErrSignupInvalidName
+	}
+	if err := i.verifySignupSecret(inp.Secret); err != nil {
+		return nil, nil, err
+	}
+
+	tx, err := i.transaction.Begin()
+	if err != nil {
+		return nil, nil, err
+	}
+	defer func() {
+		if err2 := tx.End(ctx); err == nil && err2 != nil {
+			err = err2
+		}
+	}()
+
+	// Check if user and team already exists
+	existedUser, existedTeam, err := i.userAlreadyExists(ctx, inp.User.UserID, inp.Sub, &inp.Name, inp.User.TeamID)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	if existedUser != nil {
+		if existedUser.Verification() == nil || !existedUser.Verification().IsVerified() {
+			// if user exists but not verified -> create a new verification
+			if err := i.createVerification(ctx,
existedUser); err != nil { + return nil, nil, err + } + return existedUser, existedTeam, nil + } + return nil, nil, interfaces.ErrUserAlreadyExists + } + + // Initialize user and team + var auth *user.Auth + if inp.Sub != nil { + auth = user.AuthFromAuth0Sub(*inp.Sub).Ref() + } + u, team, err := user.Init(user.InitParams{ + Email: inp.Email, + Name: inp.Name, + Sub: auth, + Password: inp.Password, + Lang: inp.User.Lang, + Theme: inp.User.Theme, + UserID: inp.User.UserID, + TeamID: inp.User.TeamID, + }) + if err != nil { + return nil, nil, err + } + + if err := i.userRepo.Save(ctx, u); err != nil { + return nil, nil, err + } + if err := i.teamRepo.Save(ctx, team); err != nil { + return nil, nil, err + } + + if err := i.createVerification(ctx, u); err != nil { + return nil, nil, err + } + + tx.Commit() + return u, team, nil +} + +func (i *User) SignupOIDC(ctx context.Context, inp interfaces.SignupOIDCParam) (u *user.User, _ *user.Team, err error) { + if err := i.verifySignupSecret(inp.Secret); err != nil { + return nil, nil, err + } + + sub := inp.Sub + name := inp.Name + email := inp.Email + if sub == "" || email == "" { + ui, err := getUserInfoFromISS(ctx, inp.Issuer, inp.AccessToken) + if err != nil { + return nil, nil, err + } + sub = ui.Sub + email = ui.Email + if name == "" { + name = ui.Nickname + } + if name == "" { + name = ui.Name + } + if name == "" { + name = ui.Email + } + } + + tx, err := i.transaction.Begin() + if err != nil { + return nil, nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + // Check if user and team already exists + if existedUser, existedTeam, err := i.userAlreadyExists(ctx, inp.User.UserID, &sub, &name, inp.User.TeamID); err != nil { + return nil, nil, err + } else if existedUser != nil || existedTeam != nil { + return nil, nil, interfaces.ErrUserAlreadyExists + } + + // Initialize user and team + u, team, err := user.Init(user.InitParams{ + Email: email, + Name: name, + 
Sub: user.AuthFromAuth0Sub(sub).Ref(), + Lang: inp.User.Lang, + Theme: inp.User.Theme, + UserID: inp.User.UserID, + TeamID: inp.User.TeamID, + }) + if err != nil { + return nil, nil, err + } + + if err := i.userRepo.Save(ctx, u); err != nil { + return nil, nil, err + } + if err := i.teamRepo.Save(ctx, team); err != nil { + return nil, nil, err + } + + tx.Commit() + return u, team, nil +} + +func (i *User) verifySignupSecret(secret *string) error { + if i.signupSecret != "" && (secret == nil || *secret != i.signupSecret) { + return interfaces.ErrSignupInvalidSecret + } + return nil +} + +func (i *User) userAlreadyExists(ctx context.Context, userID *id.UserID, sub *string, name *string, teamID *id.TeamID) (*user.User, *user.Team, error) { + // Check if user already exists + var existedUser *user.User + var err error + + if userID != nil { + existedUser, err = i.userRepo.FindByID(ctx, *userID) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return nil, nil, err + } + } else if sub != nil { + // Check if user already exists + existedUser, err = i.userRepo.FindByAuth0Sub(ctx, *sub) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return nil, nil, err + } + } else if name != nil { + existedUser, err = i.userRepo.FindByName(ctx, *name) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return nil, nil, err + } + } + + if existedUser != nil { + team, err := i.teamRepo.FindByID(ctx, existedUser.Team()) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return nil, nil, err + } + return existedUser, team, nil + } + + // Check if team already exists + if teamID != nil { + existed, err := i.teamRepo.FindByID(ctx, *teamID) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return nil, nil, err + } + if existed != nil { + return nil, existed, nil + } + } + + return nil, nil, nil +} + +func getUserInfoFromISS(ctx context.Context, iss, accessToken string) (UserInfo, error) { + if accessToken == "" { + return UserInfo{}, 
errors.New("invalid access token") + } + if iss == "" { + return UserInfo{}, errors.New("invalid issuer") + } + + var u string + c, err := getOpenIDConfiguration(ctx, iss) + if err != nil { + u2 := issToURL(iss, "/userinfo") + if u2 == nil { + return UserInfo{}, errors.New("invalid iss") + } + u = u2.String() + } else { + u = c.UserinfoEndpoint + } + return getUserInfo(ctx, u, accessToken) +} + +type OpenIDConfiguration struct { + UserinfoEndpoint string `json:"userinfo_endpoint"` +} + +func getOpenIDConfiguration(ctx context.Context, iss string) (c OpenIDConfiguration, err error) { + url := issToURL(iss, "/.well-known/openid-configuration") + if url == nil { + err = errors.New("invalid iss") + return + } + + if ctx == nil { + ctx = context.Background() + } + + req, err2 := http.NewRequestWithContext(ctx, http.MethodGet, url.String(), nil) + if err2 != nil { + err = err2 + return + } + + res, err2 := http.DefaultClient.Do(req) + if err2 != nil { + err = err2 + return + } + defer func() { + _ = res.Body.Close() + }() + if res.StatusCode != http.StatusOK { + err = errors.New("could not get user info") + return + } + if err2 := json.NewDecoder(res.Body).Decode(&c); err2 != nil { + err = fmt.Errorf("could not get user info: %w", err2) + return + } + return +} + +type UserInfo struct { + Sub string `json:"sub"` + Name string `json:"name"` + Nickname string `json:"nickname"` + Email string `json:"email"` + Error string `json:"error"` +} + +func getUserInfo(ctx context.Context, url, accessToken string) (ui UserInfo, err error) { + if ctx == nil { + ctx = context.Background() + } + + req, err2 := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err2 != nil { + err = err2 + return + } + + req.Header.Set("Authorization", "Bearer "+accessToken) + res, err2 := http.DefaultClient.Do(req) + if err2 != nil { + err = err2 + return + } + defer func() { + _ = res.Body.Close() + }() + + if res.StatusCode != http.StatusOK { + err = errors.New("could not get user info") + 
return + } + + if err2 := json.NewDecoder(res.Body).Decode(&ui); err2 != nil { + err = fmt.Errorf("could not get user info: %w", err2) + return + } + + if ui.Error != "" { + err = fmt.Errorf("could not get user info: %s", ui.Error) + return + } + if ui.Sub == "" { + err = fmt.Errorf("could not get user info: invalid response") + return + } + if ui.Email == "" { + err = fmt.Errorf("could not get user info: email scope missing") + return + } + + return +} + +func issToURL(iss, p string) *url.URL { + if iss == "" { + return nil + } + + if !strings.HasPrefix(iss, "https://") && !strings.HasPrefix(iss, "http://") { + iss = "https://" + iss + } + + u, err := url.Parse(iss) + if err == nil { + u.Path = path.Join(u.Path, p) + if u.Path == "/" { + u.Path = "" + } + return u + } + + return nil +} + +func (i *User) createVerification(ctx context.Context, u *user.User) error { + vr := user.NewVerification() + u.SetVerification(vr) + + if err := i.userRepo.Save(ctx, u); err != nil { + return err + } + + var text, html bytes.Buffer + link := i.authSrvUIDomain + "/?user-verification-token=" + vr.Code() + signupMailContent := mailContent{ + Message: "Thank you for signing up to Re:Earth. 
Please verify your email address by clicking the button below.", + Suffix: "You can use this email address to log in to Re:Earth account anytime.", + ActionLabel: "Activate your account and log in", + UserName: u.Email(), + ActionURL: htmlTmpl.URL(link), + } + if err := authTextTMPL.Execute(&text, signupMailContent); err != nil { + return err + } + if err := authHTMLTMPL.Execute(&html, signupMailContent); err != nil { + return err + } + + if err := i.mailer.SendMail( + []gateway.Contact{ + { + Email: u.Email(), + Name: u.Name(), + }, + }, + "email verification", + text.String(), + html.String(), + ); err != nil { + return err + } + + return nil +} diff --git a/server/internal/usecase/interactor/user_signup_test.go b/server/internal/usecase/interactor/user_signup_test.go new file mode 100644 index 000000000..ca0c50406 --- /dev/null +++ b/server/internal/usecase/interactor/user_signup_test.go @@ -0,0 +1,546 @@ +package interactor + +import ( + "context" + "errors" + "net/http" + "net/url" + "testing" + "time" + + "github.com/jarcoal/httpmock" + "github.com/reearth/reearth-backend/internal/infrastructure/mailer" + "github.com/reearth/reearth-backend/internal/infrastructure/memory" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" + "golang.org/x/text/language" +) + +func TestUser_Signup(t *testing.T) { + user.DefaultPasswordEncoder = &user.NoopPasswordEncoder{} + uid := id.NewUserID() + tid := id.NewTeamID() + mocktime := time.Time{} + mockcode := "CODECODE" + + defer user.MockNow(mocktime)() + defer user.MockGenerateVerificationCode(mockcode)() + + tests := []struct { + name string + signupSecret string + authSrvUIDomain string + createUserBefore *user.User + args interfaces.SignupParam + wantUser *user.User + wantTeam *user.Team + 
wantMailTo []gateway.Contact + wantMailSubject string + wantMailContent string + wantError error + }{ + { + name: "without secret", + signupSecret: "", + authSrvUIDomain: "https://reearth.io", + args: interfaces.SignupParam{ + Sub: lo.ToPtr("SUB"), + Email: "aaa@bbb.com", + Name: "NAME", + Password: lo.ToPtr("PAss00!!"), + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + }, + }, + wantUser: user.New(). + ID(uid). + Team(tid). + Name("NAME"). + Auths([]user.Auth{{Provider: "", Sub: "SUB"}}). + Email("aaa@bbb.com"). + PasswordPlainText("PAss00!!"). + Verification(user.VerificationFrom(mockcode, mocktime.Add(24*time.Hour), false)). + MustBuild(), + wantTeam: user.NewTeam(). + ID(tid). + Name("NAME"). + Members(map[id.UserID]user.Role{uid: user.RoleOwner}). + Personal(true). + MustBuild(), + wantMailTo: []gateway.Contact{{Email: "aaa@bbb.com", Name: "NAME"}}, + wantMailSubject: "email verification", + wantMailContent: "https://reearth.io/?user-verification-token=CODECODE", + wantError: nil, + }, + { + name: "existing but not valdiated user", + signupSecret: "", + authSrvUIDomain: "", + createUserBefore: user.New(). + ID(uid). + Team(tid). + Email("aaa@bbb.com"). + MustBuild(), + args: interfaces.SignupParam{ + Email: "aaa@bbb.com", + Name: "NAME", + Password: lo.ToPtr("PAss00!!"), + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + }, + }, + wantUser: user.New(). + ID(uid). + Team(tid). + Email("aaa@bbb.com"). + Verification(user.VerificationFrom(mockcode, mocktime.Add(24*time.Hour), false)). + MustBuild(), + wantTeam: nil, + wantMailTo: []gateway.Contact{{Email: "aaa@bbb.com", Name: ""}}, + wantMailSubject: "email verification", + wantMailContent: "/?user-verification-token=CODECODE", + wantError: nil, + }, + { + name: "existing and valdiated user", + signupSecret: "", + authSrvUIDomain: "", + createUserBefore: user.New(). + ID(uid). + Team(tid). + Email("aaa@bbb.com"). 
+ Verification(user.VerificationFrom(mockcode, mocktime, true)). + MustBuild(), + args: interfaces.SignupParam{ + Sub: lo.ToPtr("SUB"), + Email: "aaa@bbb.com", + Name: "NAME", + Password: lo.ToPtr("PAss00!!"), + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + }, + }, + wantUser: nil, + wantTeam: nil, + wantError: interfaces.ErrUserAlreadyExists, + }, + { + name: "without secret 2", + signupSecret: "", + authSrvUIDomain: "", + args: interfaces.SignupParam{ + Sub: lo.ToPtr("SUB"), + Email: "aaa@bbb.com", + Name: "NAME", + Password: lo.ToPtr("PAss00!!"), + Secret: lo.ToPtr("hogehoge"), + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + }, + }, + wantUser: user.New(). + ID(uid). + Team(tid). + Name("NAME"). + Auths([]user.Auth{{Provider: "", Sub: "SUB"}}). + Email("aaa@bbb.com"). + PasswordPlainText("PAss00!!"). + Verification(user.VerificationFrom(mockcode, mocktime.Add(24*time.Hour), false)). + MustBuild(), + wantTeam: user.NewTeam(). + ID(tid). + Name("NAME"). + Members(map[id.UserID]user.Role{uid: user.RoleOwner}). + Personal(true). + MustBuild(), + wantMailTo: []gateway.Contact{{Email: "aaa@bbb.com", Name: "NAME"}}, + wantMailSubject: "email verification", + wantMailContent: "/?user-verification-token=CODECODE", + wantError: nil, + }, + { + name: "with secret", + signupSecret: "SECRET", + authSrvUIDomain: "", + args: interfaces.SignupParam{ + Sub: lo.ToPtr("SUB"), + Email: "aaa@bbb.com", + Name: "NAME", + Password: lo.ToPtr("PAss00!!"), + Secret: lo.ToPtr("SECRET"), + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + Lang: &language.Japanese, + Theme: user.ThemeDark.Ref(), + }, + }, + wantUser: user.New(). + ID(uid). + Team(tid). + Name("NAME"). + Auths([]user.Auth{{Provider: "", Sub: "SUB"}}). + Email("aaa@bbb.com"). + PasswordPlainText("PAss00!!"). + Lang(language.Japanese). + Theme(user.ThemeDark). + Verification(user.VerificationFrom(mockcode, mocktime.Add(24*time.Hour), false)). 
+ MustBuild(), + wantTeam: user.NewTeam(). + ID(tid). + Name("NAME"). + Members(map[id.UserID]user.Role{uid: user.RoleOwner}). + Personal(true). + MustBuild(), + wantMailTo: []gateway.Contact{{Email: "aaa@bbb.com", Name: "NAME"}}, + wantMailSubject: "email verification", + wantMailContent: "/?user-verification-token=CODECODE", + wantError: nil, + }, + { + name: "invalid secret", + signupSecret: "SECRET", + authSrvUIDomain: "", + args: interfaces.SignupParam{ + Sub: lo.ToPtr("SUB"), + Email: "aaa@bbb.com", + Name: "NAME", + Password: lo.ToPtr("PAss00!!"), + Secret: lo.ToPtr("SECRET!"), + }, + wantError: interfaces.ErrSignupInvalidSecret, + }, + { + name: "invalid secret 2", + signupSecret: "SECRET", + authSrvUIDomain: "", + args: interfaces.SignupParam{ + Sub: lo.ToPtr("SUB"), + Email: "aaa@bbb.com", + Name: "NAME", + Password: lo.ToPtr("PAss00!!"), + }, + wantError: interfaces.ErrSignupInvalidSecret, + }, + { + name: "invalid email", + args: interfaces.SignupParam{ + Email: "aaa", + Name: "NAME", + Password: lo.ToPtr("PAss00!!"), + }, + wantError: user.ErrInvalidEmail, + }, + { + name: "invalid password", + args: interfaces.SignupParam{ + Email: "aaa@bbb.com", + Name: "NAME", + Password: lo.ToPtr("PAss00"), + }, + wantError: user.ErrPasswordLength, + }, + { + name: "invalid name", + args: interfaces.SignupParam{ + Email: "aaa@bbb.com", + Name: "", + Password: lo.ToPtr("PAss00!!"), + }, + wantError: interfaces.ErrSignupInvalidName, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + // t.Parallel() cannot be used + r := memory.New() + if tt.createUserBefore != nil { + assert.NoError(t, r.User.Save( + context.Background(), + tt.createUserBefore), + ) + } + m := mailer.NewMock() + g := &gateway.Container{Mailer: m} + uc := NewUser(r, g, tt.signupSecret, tt.authSrvUIDomain) + user, team, err := uc.Signup(context.Background(), tt.args) + assert.Equal(t, tt.wantUser, user) + assert.Equal(t, tt.wantTeam, team) + assert.Equal(t, 
tt.wantError, err) + mails := m.Mails() + if tt.wantMailSubject == "" { + assert.Empty(t, mails) + } else { + assert.Equal(t, 1, len(mails)) + assert.Equal(t, tt.wantMailSubject, mails[0].Subject) + assert.Equal(t, tt.wantMailTo, mails[0].To) + assert.Contains(t, mails[0].PlainContent, tt.wantMailContent) + } + }) + } +} + +func TestUser_SignupOIDC(t *testing.T) { + httpmock.Activate() + defer httpmock.DeactivateAndReset() + + httpmock.RegisterResponder( + "GET", + "https://issuer/.well-known/openid-configuration", + httpmock.NewStringResponder(200, `{"userinfo_endpoint":"https://issuer/userinfo"}`), + ) + + httpmock.RegisterResponder( + "GET", + "https://issuer/userinfo", + func(req *http.Request) (*http.Response, error) { + if req.Header.Get("Authorization") == "Bearer accesstoken" { + return httpmock.NewStringResponse(200, `{"sub":"SUB","email":"x@y.z","name":"NAME"}`), nil + } + return httpmock.NewStringResponse(401, "Unauthorized"), nil + }, + ) + + user.DefaultPasswordEncoder = &user.NoopPasswordEncoder{} + uid := id.NewUserID() + tid := id.NewTeamID() + mocktime := time.Time{} + mockcode := "CODECODE" + + defer user.MockNow(mocktime)() + defer user.MockGenerateVerificationCode(mockcode)() + + tests := []struct { + name string + signupSecret string + authSrvUIDomain string + createUserBefore *user.User + args interfaces.SignupOIDCParam + wantUser *user.User + wantTeam *user.Team + wantMail *mailer.Mail + wantMailTo string + wantMailSubject string + wantMailContent string + wantError error + }{ + { + name: "userinfo", + signupSecret: "", + authSrvUIDomain: "", + args: interfaces.SignupOIDCParam{ + AccessToken: "accesstoken", + Issuer: "https://issuer", + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + }, + }, + wantUser: user.New(). + ID(uid). + Team(tid). + Name("NAME"). + Auths([]user.Auth{{Provider: "", Sub: "SUB"}}). + Email("x@y.z"). + MustBuild(), + wantTeam: user.NewTeam(). + ID(tid). + Name("NAME"). 
// TestUser_SignupOIDC exercises the OIDC signup use case. The issuer's
// discovery document and userinfo endpoint are stubbed with httpmock; the
// userinfo responder only accepts the "Bearer accesstoken" credential.
// Scenarios cover: profile fetched from userinfo, profile supplied inline
// (with and without a signup secret), collision with an existing user
// (verified or not), and validation failures.
func TestUser_SignupOIDC(t *testing.T) {
	httpmock.Activate()
	defer httpmock.DeactivateAndReset()

	// OIDC discovery: points the use case at the stubbed userinfo endpoint.
	httpmock.RegisterResponder(
		"GET",
		"https://issuer/.well-known/openid-configuration",
		httpmock.NewStringResponder(200, `{"userinfo_endpoint":"https://issuer/userinfo"}`),
	)

	// Userinfo endpoint: succeeds only for the expected bearer token.
	httpmock.RegisterResponder(
		"GET",
		"https://issuer/userinfo",
		func(req *http.Request) (*http.Response, error) {
			if req.Header.Get("Authorization") == "Bearer accesstoken" {
				return httpmock.NewStringResponse(200, `{"sub":"SUB","email":"x@y.z","name":"NAME"}`), nil
			}
			return httpmock.NewStringResponse(401, "Unauthorized"), nil
		},
	)

	// Disable real password hashing and pin time / verification code.
	user.DefaultPasswordEncoder = &user.NoopPasswordEncoder{}
	uid := id.NewUserID()
	tid := id.NewTeamID()
	mocktime := time.Time{}
	mockcode := "CODECODE"

	defer user.MockNow(mocktime)()
	defer user.MockGenerateVerificationCode(mockcode)()

	// NOTE(review): wantMail, wantMailTo, wantMailSubject and wantMailContent
	// are never read in the run loop below (OIDC signup sends no mail and the
	// loop only asserts the mailbox is empty) — consider removing these fields.
	tests := []struct {
		name             string
		signupSecret     string
		authSrvUIDomain  string
		createUserBefore *user.User
		args             interfaces.SignupOIDCParam
		wantUser         *user.User
		wantTeam         *user.Team
		wantMail         *mailer.Mail
		wantMailTo       string
		wantMailSubject  string
		wantMailContent  string
		wantError        error
	}{
		{
			// Profile (sub/email/name) comes from the stubbed userinfo endpoint.
			name:            "userinfo",
			signupSecret:    "",
			authSrvUIDomain: "",
			args: interfaces.SignupOIDCParam{
				AccessToken: "accesstoken",
				Issuer:      "https://issuer",
				User: interfaces.SignupUserParam{
					UserID: &uid,
					TeamID: &tid,
				},
			},
			wantUser: user.New().
				ID(uid).
				Team(tid).
				Name("NAME").
				Auths([]user.Auth{{Provider: "", Sub: "SUB"}}).
				Email("x@y.z").
				MustBuild(),
			wantTeam: user.NewTeam().
				ID(tid).
				Name("NAME").
				Members(map[id.UserID]user.Role{uid: user.RoleOwner}).
				Personal(true).
				MustBuild(),
			wantError: nil,
		},
		{
			// Profile supplied inline; userinfo endpoint is not needed.
			name:            "no userinfo",
			signupSecret:    "",
			authSrvUIDomain: "",
			args: interfaces.SignupOIDCParam{
				Email:       "aaa@bbb.com",
				Name:        "name",
				AccessToken: "accesstoken",
				Issuer:      "https://issuer",
				Sub:         "sub",
				User: interfaces.SignupUserParam{
					UserID: &uid,
					TeamID: &tid,
				},
			},
			wantUser: user.New().
				ID(uid).
				Team(tid).
				Name("name").
				Auths([]user.Auth{{Provider: "", Sub: "sub"}}).
				Email("aaa@bbb.com").
				MustBuild(),
			wantTeam: user.NewTeam().
				ID(tid).
				Name("name").
				Members(map[id.UserID]user.Role{uid: user.RoleOwner}).
				Personal(true).
				MustBuild(),
			wantError: nil,
		},
		{
			name:            "no userinfo with secret",
			signupSecret:    "SECRET",
			authSrvUIDomain: "",
			args: interfaces.SignupOIDCParam{
				Email:       "aaa@bbb.com",
				Name:        "name",
				AccessToken: "accesstoken",
				Issuer:      "https://issuer",
				Sub:         "sub",
				Secret:      lo.ToPtr("SECRET"),
				User: interfaces.SignupUserParam{
					UserID: &uid,
					TeamID: &tid,
				},
			},
			wantUser: user.New().
				ID(uid).
				Team(tid).
				Name("name").
				Auths([]user.Auth{{Provider: "", Sub: "sub"}}).
				Email("aaa@bbb.com").
				MustBuild(),
			wantTeam: user.NewTeam().
				ID(tid).
				Name("name").
				Members(map[id.UserID]user.Role{uid: user.RoleOwner}).
				Personal(true).
				MustBuild(),
			wantError: nil,
		},
		{
			// Unlike password signup, OIDC signup rejects an existing user
			// even when that user has not completed verification.
			name:            "existed but not validated user",
			signupSecret:    "",
			authSrvUIDomain: "",
			createUserBefore: user.New().
				ID(uid).
				Email("aaa@bbb.com").
				MustBuild(),
			args: interfaces.SignupOIDCParam{
				AccessToken: "accesstoken",
				Issuer:      "https://issuer",
				User: interfaces.SignupUserParam{
					UserID: &uid,
					TeamID: &tid,
				},
			},
			wantError: interfaces.ErrUserAlreadyExists,
		},
		{
			name:            "existed and verified user",
			signupSecret:    "",
			authSrvUIDomain: "",
			createUserBefore: user.New().
				ID(uid).
				Email("aaa@bbb.com").
				Verification(user.VerificationFrom(mockcode, mocktime, true)).
				MustBuild(),
			args: interfaces.SignupOIDCParam{
				AccessToken: "accesstoken",
				Issuer:      "https://issuer",
				User: interfaces.SignupUserParam{
					UserID: &uid,
					TeamID: &tid,
				},
			},
			wantError: interfaces.ErrUserAlreadyExists,
		},
		{
			name:            "invalid secret",
			signupSecret:    "SECRET",
			authSrvUIDomain: "",
			args: interfaces.SignupOIDCParam{
				Email:       "aaa@bbb.com",
				Name:        "name",
				AccessToken: "accesstoken",
				Issuer:      "https://issuer",
				Sub:         "sub",
				Secret:      lo.ToPtr("SECRET!"),
				User: interfaces.SignupUserParam{
					UserID: &uid,
					TeamID: &tid,
				},
			},
			wantError: interfaces.ErrSignupInvalidSecret,
		},
		{
			name: "invalid email",
			args: interfaces.SignupOIDCParam{
				Email:       "aaabbbcom",
				Name:        "name",
				AccessToken: "accesstoken",
				Issuer:      "https://issuer",
				Sub:         "sub",
			},
			wantError: user.ErrInvalidEmail,
		},
		{
			name: "invalid access token",
			args: interfaces.SignupOIDCParam{
				Email:       "",
				Name:        "",
				AccessToken: "",
				Issuer:      "https://issuer",
				Sub:         "sub",
			},
			wantError: errors.New("invalid access token"),
		},
		{
			name: "invalid issuer",
			args: interfaces.SignupOIDCParam{
				Email:       "",
				Name:        "",
				AccessToken: "access token",
				Issuer:      "",
				Sub:         "sub",
			},
			wantError: errors.New("invalid issuer"),
		},
	}

	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			// t.Parallel() cannot be used: package-level mocks (clock,
			// verification code, password encoder, httpmock) are shared.
			r := memory.New()
			if tt.createUserBefore != nil {
				assert.NoError(t, r.User.Save(
					context.Background(),
					tt.createUserBefore),
				)
			}
			m := mailer.NewMock()
			g := &gateway.Container{Mailer: m}
			uc := NewUser(r, g, tt.signupSecret, tt.authSrvUIDomain)
			// NOTE(review): `user` here shadows the imported user package
			// inside this closure; harmless but worth renaming to `u`.
			user, team, err := uc.SignupOIDC(context.Background(), tt.args)
			assert.Equal(t, tt.wantUser, user)
			assert.Equal(t, tt.wantTeam, team)
			assert.Equal(t, tt.wantError, err)
			// OIDC signup must not send any verification mail.
			assert.Empty(t, m.Mails())
		})
	}
}

// TestIssToURL checks issuer-string normalization: an empty issuer yields nil,
// a bare host defaults to https, an explicit scheme is preserved, a trailing
// slash is dropped, and the second argument is joined onto the issuer path.
func TestIssToURL(t *testing.T) {
	assert.Nil(t, issToURL("", ""))
	assert.Equal(t, &url.URL{Scheme: "https", Host: "iss.com"}, issToURL("iss.com", ""))
	assert.Equal(t, &url.URL{Scheme: "https", Host: "iss.com"}, issToURL("https://iss.com", ""))
	assert.Equal(t, &url.URL{Scheme: "http", Host: "iss.com"}, issToURL("http://iss.com", ""))
	assert.Equal(t, &url.URL{Scheme: "https", Host: "iss.com", Path: ""}, issToURL("https://iss.com/", ""))
	assert.Equal(t, &url.URL{Scheme: "https", Host: "iss.com", Path: "/hoge"}, issToURL("https://iss.com/hoge", ""))
	assert.Equal(t, &url.URL{Scheme: "https", Host: "iss.com", Path: "/hoge/foobar"}, issToURL("https://iss.com/hoge", "foobar"))
}
"")) + assert.Equal(t, &url.URL{Scheme: "https", Host: "iss.com"}, issToURL("iss.com", "")) + assert.Equal(t, &url.URL{Scheme: "https", Host: "iss.com"}, issToURL("https://iss.com", "")) + assert.Equal(t, &url.URL{Scheme: "http", Host: "iss.com"}, issToURL("http://iss.com", "")) + assert.Equal(t, &url.URL{Scheme: "https", Host: "iss.com", Path: ""}, issToURL("https://iss.com/", "")) + assert.Equal(t, &url.URL{Scheme: "https", Host: "iss.com", Path: "/hoge"}, issToURL("https://iss.com/hoge", "")) + assert.Equal(t, &url.URL{Scheme: "https", Host: "iss.com", Path: "/hoge/foobar"}, issToURL("https://iss.com/hoge", "foobar")) +} diff --git a/server/internal/usecase/interfaces/asset.go b/server/internal/usecase/interfaces/asset.go new file mode 100644 index 000000000..1028d7b27 --- /dev/null +++ b/server/internal/usecase/interfaces/asset.go @@ -0,0 +1,35 @@ +package interfaces + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/asset" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" +) + +type AssetFilterType string + +const ( + AssetFilterDate AssetFilterType = "DATE" + AssetFilterSize AssetFilterType = "SIZE" + AssetFilterName AssetFilterType = "NAME" +) + +type CreateAssetParam struct { + TeamID id.TeamID + File *file.File +} + +var ( + ErrCreateAssetFailed error = errors.New("failed to create asset") +) + +type Asset interface { + Fetch(context.Context, []id.AssetID, *usecase.Operator) ([]*asset.Asset, error) + FindByTeam(context.Context, id.TeamID, *string, *asset.SortType, *usecase.Pagination, *usecase.Operator) ([]*asset.Asset, *usecase.PageInfo, error) + Create(context.Context, CreateAssetParam, *usecase.Operator) (*asset.Asset, error) + Remove(context.Context, id.AssetID, *usecase.Operator) (id.AssetID, error) +} diff --git a/server/internal/usecase/interfaces/common.go b/server/internal/usecase/interfaces/common.go new file mode 100644 
index 000000000..3fa486959 --- /dev/null +++ b/server/internal/usecase/interfaces/common.go @@ -0,0 +1,31 @@ +package interfaces + +import "errors" + +type ListOperation string + +const ( + ListOperationAdd ListOperation = "add" + ListOperationMove ListOperation = "move" + ListOperationRemove ListOperation = "remove" +) + +var ( + ErrSceneIsLocked error = errors.New("scene is locked") + ErrOperationDenied error = errors.New("operation denied") + ErrFileNotIncluded error = errors.New("file not included") +) + +type Container struct { + Asset Asset + Dataset Dataset + Layer Layer + Plugin Plugin + Project Project + Property Property + Published Published + Scene Scene + Tag Tag + Team Team + User User +} diff --git a/server/internal/usecase/interfaces/dataset.go b/server/internal/usecase/interfaces/dataset.go new file mode 100644 index 000000000..2e39fde06 --- /dev/null +++ b/server/internal/usecase/interfaces/dataset.go @@ -0,0 +1,79 @@ +package interfaces + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" +) + +type AddDatasetSchemaParam struct { + SceneId id.SceneID + Name string + RepresentativeField *id.DatasetFieldID +} + +type AddDynamicDatasetSchemaParam struct { + SceneId id.SceneID +} + +type AddDynamicDatasetParam struct { + SchemaId id.DatasetSchemaID + Author string + Content string + Target *string + Lat *float64 + Lng *float64 +} + +type ImportDatasetParam struct { + File *file.File + SceneId id.SceneID + SchemaId *id.DatasetSchemaID +} + +type ImportDatasetFromGoogleSheetParam struct { + Token string + FileID string + SheetName string + SceneId id.SceneID + SchemaId *id.DatasetSchemaID +} + +type RemoveDatasetSchemaParam struct { + SchemaID id.DatasetSchemaID + Force *bool +} + +type UpdateDatasetSchemaParam struct { + SchemaId id.DatasetSchemaID + Name string +} + 
+var ( + ErrNoDataSourceAvailable error = errors.New("no datasource available") + ErrDataSourceInvalidURL error = errors.New("invalid url") + ErrDatasetInvalidDepth error = errors.New("invalid depth") +) + +type Dataset interface { + Fetch(context.Context, []id.DatasetID, *usecase.Operator) (dataset.List, error) + GraphFetch(context.Context, id.DatasetID, int, *usecase.Operator) (dataset.List, error) + FetchSchema(context.Context, []id.DatasetSchemaID, *usecase.Operator) (dataset.SchemaList, error) + ImportDataset(context.Context, ImportDatasetParam, *usecase.Operator) (*dataset.Schema, error) + ImportDatasetFromGoogleSheet(context.Context, ImportDatasetFromGoogleSheetParam, *usecase.Operator) (*dataset.Schema, error) + GraphFetchSchema(context.Context, id.DatasetSchemaID, int, *usecase.Operator) (dataset.SchemaList, error) + AddDynamicDatasetSchema(context.Context, AddDynamicDatasetSchemaParam) (*dataset.Schema, error) + AddDynamicDataset(context.Context, AddDynamicDatasetParam) (*dataset.Schema, *dataset.Dataset, error) + FindBySchema(context.Context, id.DatasetSchemaID, *usecase.Pagination, *usecase.Operator) (dataset.List, *usecase.PageInfo, error) + CountBySchema(context.Context, id.DatasetSchemaID) (int, error) + FindSchemaByScene(context.Context, id.SceneID, *usecase.Pagination, *usecase.Operator) (dataset.SchemaList, *usecase.PageInfo, error) + FindDynamicSchemaByScene(context.Context, id.SceneID) (dataset.SchemaList, error) + RemoveDatasetSchema(context.Context, RemoveDatasetSchemaParam, *usecase.Operator) (id.DatasetSchemaID, error) + UpdateDatasetSchema(context.Context, UpdateDatasetSchemaParam, *usecase.Operator) (*dataset.Schema, error) + Sync(context.Context, id.SceneID, string, *usecase.Operator) (dataset.SchemaList, dataset.List, error) + AddDatasetSchema(context.Context, AddDatasetSchemaParam, *usecase.Operator) (*dataset.Schema, error) +} diff --git a/server/internal/usecase/interfaces/layer.go b/server/internal/usecase/interfaces/layer.go new 
file mode 100644 index 000000000..afd5deb93 --- /dev/null +++ b/server/internal/usecase/interfaces/layer.go @@ -0,0 +1,108 @@ +package interfaces + +import ( + "context" + "errors" + "io" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/decoding" + "github.com/reearth/reearth-backend/pkg/property" +) + +type AddLayerItemInput struct { + ParentLayerID id.LayerID + ExtensionID *id.PluginExtensionID + Index *int + LinkedDatasetID *id.DatasetID + Name string + LatLng *property.LatLng +} + +type AddLayerGroupInput struct { + ParentLayerID id.LayerID + ExtensionID *id.PluginExtensionID + Index *int + LinkedDatasetSchemaID *id.DatasetSchemaID + RepresentativeFieldId *id.DatasetFieldID + Name string +} + +type UpdateLayerInput struct { + LayerID id.LayerID + Name *string + Visible *bool +} + +type MoveLayerInput struct { + LayerID id.LayerID + DestLayerID *id.LayerID + Index int +} + +type AddInfoboxFieldParam struct { + LayerID id.LayerID + PluginID id.PluginID + ExtensionID id.PluginExtensionID + Index *int +} + +type MoveInfoboxFieldParam struct { + LayerID id.LayerID + InfoboxFieldID id.InfoboxFieldID + Index int +} + +type RemoveInfoboxFieldParam struct { + LayerID id.LayerID + InfoboxFieldID id.InfoboxFieldID +} +type ImportLayerParam struct { + LayerID id.LayerID + File *file.File + Format decoding.LayerEncodingFormat +} + +var ( + ErrParentLayerNotFound error = errors.New("parent layer not found") + ErrPluginNotFound error = errors.New("plugin not found") + ErrExtensionNotFound error = errors.New("extension not found") + ErrInfoboxNotFound error = errors.New("infobox not found") + ErrInfoboxAlreadyExists error = errors.New("infobox already exists") + ErrCannotAddLayerToLinkedLayerGroup error = errors.New("cannot add layer to linked layer group") + 
ErrCannotRemoveLayerToLinkedLayerGroup error = errors.New("cannot remove layer to linked layer group") + ErrLinkedLayerItemCannotBeMoved error = errors.New("linked layer item cannot be moved") + ErrLayerCannotBeMovedToLinkedLayerGroup error = errors.New("layer cannot be moved to linked layer group") + ErrCannotMoveLayerToOtherScene error = errors.New("layer cannot layer to other scene") + ErrExtensionTypeMustBePrimitive error = errors.New("extension type must be primitive") + ErrExtensionTypeMustBeBlock error = errors.New("extension type must be block") + ErrInvalidExtensionType error = errors.New("invalid extension type") +) + +type Layer interface { + Fetch(context.Context, []id.LayerID, *usecase.Operator) (layer.List, error) + FetchGroup(context.Context, []id.LayerID, *usecase.Operator) ([]*layer.Group, error) + FetchItem(context.Context, []id.LayerID, *usecase.Operator) ([]*layer.Item, error) + FetchParent(context.Context, id.LayerID, *usecase.Operator) (*layer.Group, error) + FetchByProperty(context.Context, id.PropertyID, *usecase.Operator) (layer.Layer, error) + FetchMerged(context.Context, id.LayerID, *id.LayerID, *usecase.Operator) (*layer.Merged, error) + FetchParentAndMerged(context.Context, id.LayerID, *usecase.Operator) (*layer.Merged, error) + FetchByTag(context.Context, id.TagID, *usecase.Operator) (layer.List, error) + Export(context.Context, id.LayerID, string) (io.Reader, string, error) + AddItem(context.Context, AddLayerItemInput, *usecase.Operator) (*layer.Item, *layer.Group, error) + AddGroup(context.Context, AddLayerGroupInput, *usecase.Operator) (*layer.Group, *layer.Group, error) + Remove(context.Context, id.LayerID, *usecase.Operator) (id.LayerID, *layer.Group, error) + Update(context.Context, UpdateLayerInput, *usecase.Operator) (layer.Layer, error) + Move(context.Context, MoveLayerInput, *usecase.Operator) (id.LayerID, *layer.Group, *layer.Group, int, error) + CreateInfobox(context.Context, id.LayerID, *usecase.Operator) (layer.Layer, 
error) + RemoveInfobox(context.Context, id.LayerID, *usecase.Operator) (layer.Layer, error) + AddInfoboxField(context.Context, AddInfoboxFieldParam, *usecase.Operator) (*layer.InfoboxField, layer.Layer, error) + MoveInfoboxField(context.Context, MoveInfoboxFieldParam, *usecase.Operator) (id.InfoboxFieldID, layer.Layer, int, error) + RemoveInfoboxField(context.Context, RemoveInfoboxFieldParam, *usecase.Operator) (id.InfoboxFieldID, layer.Layer, error) + ImportLayer(context.Context, ImportLayerParam, *usecase.Operator) (layer.List, *layer.Group, error) + AttachTag(context.Context, id.LayerID, id.TagID, *usecase.Operator) (layer.Layer, error) + DetachTag(context.Context, id.LayerID, id.TagID, *usecase.Operator) (layer.Layer, error) +} diff --git a/server/internal/usecase/interfaces/plugin.go b/server/internal/usecase/interfaces/plugin.go new file mode 100644 index 000000000..115859c34 --- /dev/null +++ b/server/internal/usecase/interfaces/plugin.go @@ -0,0 +1,24 @@ +package interfaces + +import ( + "context" + "errors" + "io" + "net/url" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/scene" +) + +var ( + ErrPluginAlreadyRegistered = errors.New("plugin already registered") + ErrInvalidPluginPackage = errors.New("invalid plugin package") +) + +type Plugin interface { + Fetch(context.Context, []id.PluginID, *usecase.Operator) ([]*plugin.Plugin, error) + Upload(context.Context, io.Reader, id.SceneID, *usecase.Operator) (*plugin.Plugin, *scene.Scene, error) + UploadFromRemote(context.Context, *url.URL, id.SceneID, *usecase.Operator) (*plugin.Plugin, *scene.Scene, error) +} diff --git a/server/internal/usecase/interfaces/project.go b/server/internal/usecase/interfaces/project.go new file mode 100644 index 000000000..b37aa4ae6 --- /dev/null +++ b/server/internal/usecase/interfaces/project.go @@ -0,0 +1,61 @@ +package interfaces 
// ---- server/internal/usecase/interfaces/project.go ----
package interfaces

import (
	"context"
	"errors"
	"net/url"

	"github.com/reearth/reearth-backend/internal/usecase"
	"github.com/reearth/reearth-backend/pkg/id"
	"github.com/reearth/reearth-backend/pkg/project"
	"github.com/reearth/reearth-backend/pkg/visualizer"
)

// CreateProjectParam carries the inputs for Project.Create.
type CreateProjectParam struct {
	TeamID      id.TeamID
	Visualizer  visualizer.Visualizer
	Name        *string
	Description *string
	ImageURL    *url.URL
	Alias       *string
	Archived    *bool
}

// UpdateProjectParam carries the inputs for Project.Update; nil pointer
// fields mean "leave unchanged", the Delete* booleans request removal.
type UpdateProjectParam struct {
	ID                id.ProjectID
	Name              *string
	Description       *string
	Alias             *string
	Archived          *bool
	IsBasicAuthActive *bool
	BasicAuthUsername *string
	BasicAuthPassword *string
	ImageURL          *url.URL
	PublicTitle       *string
	PublicDescription *string
	PublicImage       *string
	PublicNoIndex     *bool
	DeletePublicImage bool
	DeleteImageURL    bool
}

// PublishProjectParam carries the inputs for Project.Publish.
type PublishProjectParam struct {
	ID     id.ProjectID
	Alias  *string
	Status project.PublishmentStatus
}

var (
	// ErrProjectAliasIsNotSet is returned when publishing without an alias.
	ErrProjectAliasIsNotSet error = errors.New("project alias is not set")
	// ErrProjectAliasAlreadyUsed is returned when the alias is taken.
	ErrProjectAliasAlreadyUsed error = errors.New("project alias is already used by another project")
)

// Project is the project use case boundary consumed by the API layer.
type Project interface {
	Fetch(context.Context, []id.ProjectID, *usecase.Operator) ([]*project.Project, error)
	FindByTeam(context.Context, id.TeamID, *usecase.Pagination, *usecase.Operator) ([]*project.Project, *usecase.PageInfo, error)
	Create(context.Context, CreateProjectParam, *usecase.Operator) (*project.Project, error)
	Update(context.Context, UpdateProjectParam, *usecase.Operator) (*project.Project, error)
	Publish(context.Context, PublishProjectParam, *usecase.Operator) (*project.Project, error)
	CheckAlias(context.Context, string) (bool, error)
	Delete(context.Context, id.ProjectID, *usecase.Operator) error
}

// ---- server/internal/usecase/interfaces/property.go ----
package interfaces

import (
	"context"
	"errors"

	"github.com/reearth/reearth-backend/internal/usecase"
	"github.com/reearth/reearth-backend/pkg/file"
	"github.com/reearth/reearth-backend/pkg/id"
	"github.com/reearth/reearth-backend/pkg/property"
)

// UpdatePropertyValueParam carries the inputs for Property.UpdateValue.
type UpdatePropertyValueParam struct {
	PropertyID id.PropertyID
	Pointer    *property.Pointer
	Value      *property.Value
}

// RemovePropertyFieldParam carries the inputs for Property.RemoveField.
type RemovePropertyFieldParam struct {
	PropertyID id.PropertyID
	Pointer    *property.Pointer
}

// UploadFileParam carries the inputs for Property.UploadFile.
type UploadFileParam struct {
	PropertyID id.PropertyID
	Pointer    *property.Pointer
	File       *file.File
}

// LinkPropertyValueParam carries the inputs for Property.LinkValue.
type LinkPropertyValueParam struct {
	PropertyID id.PropertyID
	Pointer    *property.Pointer
	Links      *property.Links
}

// UnlinkPropertyValueParam carries the inputs for Property.UnlinkValue.
type UnlinkPropertyValueParam struct {
	PropertyID id.PropertyID
	Pointer    *property.Pointer
}

// AddPropertyItemParam carries the inputs for Property.AddItem.
type AddPropertyItemParam struct {
	PropertyID     id.PropertyID
	Pointer        *property.Pointer
	Index          *int
	NameFieldValue *property.Value
}

// MovePropertyItemParam carries the inputs for Property.MoveItem.
type MovePropertyItemParam struct {
	PropertyID id.PropertyID
	Pointer    *property.Pointer
	Index      int
}

// RemovePropertyItemParam carries the inputs for Property.RemoveItem.
type RemovePropertyItemParam struct {
	PropertyID id.PropertyID
	Pointer    *property.Pointer
}

// UpdatePropertyItemsParam carries the inputs for Property.UpdateItems.
type UpdatePropertyItemsParam struct {
	PropertyID id.PropertyID
	Pointer    *property.Pointer
	Operations []UpdatePropertyItemsOperationParam
}

// UpdatePropertyItemsOperationParam describes one list mutation applied by
// Property.UpdateItems (see ListOperation in common.go).
type UpdatePropertyItemsOperationParam struct {
	Operation      ListOperation
	ItemID         *id.PropertyItemID
	Index          *int
	NameFieldValue *property.Value
}

// Sentinel errors for property operations.
var (
	ErrPropertyNotFound              error = errors.New("property not found")
	ErrPropertyInvalidType           error = errors.New("property invalid type")
	ErrInvalidFile                   error = errors.New("invalid file")
	ErrFailedToUploadFile            error = errors.New("failed to upload file")
	ErrPropertySchemaMustBeSpecified error = errors.New("property schema must be specified")
	ErrInvalidDatasetFieldID         error = errors.New("invalid dataset field id")
	ErrInvalidPropertyLinks          error = errors.New("invalid property links")
	ErrInvalidPropertyValue          error = errors.New("invalid property value")
)

// Property is the property use case boundary consumed by the API layer.
type Property interface {
	Fetch(context.Context, []id.PropertyID, *usecase.Operator) ([]*property.Property, error)
	FetchSchema(context.Context, []id.PropertySchemaID, *usecase.Operator) ([]*property.Schema, error)
	FetchMerged(context.Context, *id.PropertyID, *id.PropertyID, *id.DatasetID, *usecase.Operator) (*property.Merged, error)
	UpdateValue(context.Context, UpdatePropertyValueParam, *usecase.Operator) (*property.Property, *property.GroupList, *property.Group, *property.Field, error)
	RemoveField(context.Context, RemovePropertyFieldParam, *usecase.Operator) (*property.Property, error)
	UploadFile(context.Context, UploadFileParam, *usecase.Operator) (*property.Property, *property.GroupList, *property.Group, *property.Field, error)
	LinkValue(context.Context, LinkPropertyValueParam, *usecase.Operator) (*property.Property, *property.GroupList, *property.Group, *property.Field, error)
	UnlinkValue(context.Context, UnlinkPropertyValueParam, *usecase.Operator) (*property.Property, *property.GroupList, *property.Group, *property.Field, error)
	AddItem(context.Context, AddPropertyItemParam, *usecase.Operator) (*property.Property, *property.GroupList, *property.Group, error)
	MoveItem(context.Context, MovePropertyItemParam, *usecase.Operator) (*property.Property, *property.GroupList, *property.Group, error)
	RemoveItem(context.Context, RemovePropertyItemParam, *usecase.Operator) (*property.Property, error)
	UpdateItems(context.Context, UpdatePropertyItemsParam, *usecase.Operator) (*property.Property, error)
}
// ---- server/internal/usecase/interfaces/published.go ----
package interfaces

import (
	"context"
	"io"
	"net/url"

	"github.com/reearth/reearth-backend/pkg/project"
)

// ProjectPublishedMetadata is the public metadata served for a published
// project. NOTE(review): BasicAuthPassword is serialized into JSON here —
// confirm this payload is never exposed to unauthenticated clients.
type ProjectPublishedMetadata struct {
	Title             string `json:"title,omitempty"`
	Description       string `json:"description,omitempty"`
	Image             string `json:"image,omitempty"`
	Noindex           bool   `json:"noindex,omitempty"`
	IsBasicAuthActive bool   `json:"isBasicAuthActive,omitempty"`
	BasicAuthUsername string `json:"basicAuthUsername,omitempty"`
	BasicAuthPassword string `json:"basicAuthPassword,omitempty"`
}

// ProjectPublishedMetadataFrom copies the public fields of prj into a
// ProjectPublishedMetadata value.
func ProjectPublishedMetadataFrom(prj *project.Project) ProjectPublishedMetadata {
	return ProjectPublishedMetadata{
		Title:             prj.PublicTitle(),
		Description:       prj.PublicDescription(),
		Image:             prj.PublicImage(),
		Noindex:           prj.PublicNoIndex(),
		IsBasicAuthActive: prj.IsBasicAuthActive(),
		BasicAuthUsername: prj.BasicAuthUsername(),
		BasicAuthPassword: prj.BasicAuthPassword(),
	}
}

// Published is the use case boundary for serving published project content.
type Published interface {
	Metadata(context.Context, string) (ProjectPublishedMetadata, error)
	Data(context.Context, string) (io.Reader, error)
	Index(context.Context, string, *url.URL) (string, error)
}

// ---- server/internal/usecase/interfaces/scene.go ----
package interfaces

import (
	"context"
	"errors"

	"github.com/reearth/reearth-backend/internal/usecase"
	"github.com/reearth/reearth-backend/pkg/id"
	"github.com/reearth/reearth-backend/pkg/scene"
)

// Sentinel errors for scene/plugin operations.
var (
	ErrPluginAlreadyInstalled    error = errors.New("plugin already installed")
	ErrPluginNotInstalled        error = errors.New("plugin not installed")
	ErrCannotUpgradeToPlugin     error = errors.New("cannot upgrade to such plugin")
	ErrExtensionTypeMustBeWidget error = errors.New("extension type must be widget")
)

// Scene is the scene use case boundary consumed by the API layer.
type Scene interface {
	Fetch(context.Context, []id.SceneID, *usecase.Operator) ([]*scene.Scene, error)
	FindByProject(context.Context, id.ProjectID, *usecase.Operator) (*scene.Scene, error)
	Create(context.Context, id.ProjectID, *usecase.Operator) (*scene.Scene, error)
	AddWidget(context.Context, id.SceneID, id.PluginID, id.PluginExtensionID, *usecase.Operator) (*scene.Scene, *scene.Widget, error)
	UpdateWidget(context.Context, UpdateWidgetParam, *usecase.Operator) (*scene.Scene, *scene.Widget, error)
	UpdateWidgetAlignSystem(context.Context, UpdateWidgetAlignSystemParam, *usecase.Operator) (*scene.Scene, error)
	RemoveWidget(context.Context, id.SceneID, id.WidgetID, *usecase.Operator) (*scene.Scene, error)
	InstallPlugin(context.Context, id.SceneID, id.PluginID, *usecase.Operator) (*scene.Scene, *id.PropertyID, error)
	UninstallPlugin(context.Context, id.SceneID, id.PluginID, *usecase.Operator) (*scene.Scene, error)
	UpgradePlugin(context.Context, id.SceneID, id.PluginID, id.PluginID, *usecase.Operator) (*scene.Scene, error)
	AddCluster(context.Context, id.SceneID, string, *usecase.Operator) (*scene.Scene, *scene.Cluster, error)
	UpdateCluster(context.Context, UpdateClusterParam, *usecase.Operator) (*scene.Scene, *scene.Cluster, error)
	RemoveCluster(context.Context, id.SceneID, id.ClusterID, *usecase.Operator) (*scene.Scene, error)
}

// UpdateWidgetParam carries the inputs for Scene.UpdateWidget; nil pointer
// fields mean "leave unchanged".
type UpdateWidgetParam struct {
	SceneID  id.SceneID
	WidgetID id.WidgetID
	Enabled  *bool
	Extended *bool
	Location *scene.WidgetLocation
	Index    *int
}

// UpdateWidgetAlignSystemParam carries the inputs for Scene.UpdateWidgetAlignSystem.
type UpdateWidgetAlignSystemParam struct {
	SceneID  id.SceneID
	Location scene.WidgetLocation
	Align    *scene.WidgetAlignType
}

// UpdateClusterParam carries the inputs for Scene.UpdateCluster.
type UpdateClusterParam struct {
	ClusterID  id.ClusterID
	SceneID    id.SceneID
	Name       *string
	PropertyID *id.PropertyID
}
"github.com/reearth/reearth-backend/pkg/tag" +) + +var ( + ErrNonemptyTagGroupCannotDelete = errors.New("can't delete non-empty tag group") +) + +type CreateTagItemParam struct { + Label string + SceneID id.SceneID + Parent *id.TagID + LinkedDatasetSchemaID *id.DatasetSchemaID + LinkedDatasetID *id.DatasetID + LinkedDatasetField *id.DatasetFieldID +} + +type CreateTagGroupParam struct { + Label string + SceneID id.SceneID + Tags []id.TagID +} + +type AttachItemToGroupParam struct { + ItemID, GroupID id.TagID +} + +type DetachItemToGroupParam struct { + ItemID, GroupID id.TagID +} + +type UpdateTagParam struct { + Label *string + TagID id.TagID +} + +type Tag interface { + Fetch(context.Context, []id.TagID, *usecase.Operator) ([]*tag.Tag, error) + FetchByScene(context.Context, id.SceneID, *usecase.Operator) ([]*tag.Tag, error) + FetchItem(context.Context, []id.TagID, *usecase.Operator) ([]*tag.Item, error) + FetchGroup(context.Context, []id.TagID, *usecase.Operator) ([]*tag.Group, error) + CreateItem(context.Context, CreateTagItemParam, *usecase.Operator) (*tag.Item, *tag.Group, error) + CreateGroup(context.Context, CreateTagGroupParam, *usecase.Operator) (*tag.Group, error) + AttachItemToGroup(context.Context, AttachItemToGroupParam, *usecase.Operator) (*tag.Group, error) + DetachItemFromGroup(context.Context, DetachItemToGroupParam, *usecase.Operator) (*tag.Group, error) + UpdateTag(context.Context, UpdateTagParam, *usecase.Operator) (*tag.Tag, error) + Remove(context.Context, id.TagID, *usecase.Operator) (*id.TagID, layer.List, error) +} diff --git a/server/internal/usecase/interfaces/team.go b/server/internal/usecase/interfaces/team.go new file mode 100644 index 000000000..503b07bb0 --- /dev/null +++ b/server/internal/usecase/interfaces/team.go @@ -0,0 +1,27 @@ +package interfaces + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" + 
"github.com/reearth/reearth-backend/pkg/user" +) + +var ( + ErrOwnerCannotLeaveTheTeam = errors.New("owner user cannot leave from the team") + ErrCannotChangeOwnerRole = errors.New("cannot change the role of the team owner") + ErrCannotDeleteTeam = errors.New("cannot delete team because at least one project is left") +) + +type Team interface { + Fetch(context.Context, []id.TeamID, *usecase.Operator) ([]*user.Team, error) + FindByUser(context.Context, id.UserID, *usecase.Operator) ([]*user.Team, error) + Create(context.Context, string, id.UserID, *usecase.Operator) (*user.Team, error) + Update(context.Context, id.TeamID, string, *usecase.Operator) (*user.Team, error) + AddMember(context.Context, id.TeamID, id.UserID, user.Role, *usecase.Operator) (*user.Team, error) + RemoveMember(context.Context, id.TeamID, id.UserID, *usecase.Operator) (*user.Team, error) + UpdateMember(context.Context, id.TeamID, id.UserID, user.Role, *usecase.Operator) (*user.Team, error) + Remove(context.Context, id.TeamID, *usecase.Operator) error +} diff --git a/server/internal/usecase/interfaces/user.go b/server/internal/usecase/interfaces/user.go new file mode 100644 index 000000000..c3a743bc4 --- /dev/null +++ b/server/internal/usecase/interfaces/user.go @@ -0,0 +1,80 @@ +package interfaces + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/pkg/user" + "golang.org/x/text/language" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +var ( + ErrUserInvalidPasswordConfirmation = errors.New("invalid password confirmation") + ErrUserInvalidPasswordReset = errors.New("invalid password reset request") + ErrUserInvalidLang = errors.New("invalid lang") + ErrSignupInvalidSecret = errors.New("invalid secret") + ErrSignupInvalidName = errors.New("invalid name") + ErrInvalidUserEmail = errors.New("invalid email") + ErrNotVerifiedUser = errors.New("not verified user") + ErrSignupInvalidPassword = errors.New("invalid 
password") + ErrUserAlreadyExists = errors.New("user already exists") +) + +type SignupParam struct { + Sub *string // required by Auth0 + Email string + Name string + Password *string + Secret *string + User SignupUserParam +} + +type SignupOIDCParam struct { + AccessToken string + Issuer string + Sub string + Email string + Name string + Secret *string + User SignupUserParam +} + +type SignupUserParam struct { + UserID *id.UserID + Lang *language.Tag + Theme *user.Theme + TeamID *id.TeamID +} + +type GetUserByCredentials struct { + Email string + Password string +} + +type UpdateMeParam struct { + Name *string + Email *string + Lang *language.Tag + Theme *user.Theme + Password *string + PasswordConfirmation *string +} + +type User interface { + Fetch(context.Context, []id.UserID, *usecase.Operator) ([]*user.User, error) + Signup(context.Context, SignupParam) (*user.User, *user.Team, error) + SignupOIDC(context.Context, SignupOIDCParam) (*user.User, *user.Team, error) + CreateVerification(context.Context, string) error + VerifyUser(context.Context, string) (*user.User, error) + GetUserByCredentials(context.Context, GetUserByCredentials) (*user.User, error) + GetUserBySubject(context.Context, string) (*user.User, error) + StartPasswordReset(context.Context, string) error + PasswordReset(context.Context, string, string) error + UpdateMe(context.Context, UpdateMeParam, *usecase.Operator) (*user.User, error) + RemoveMyAuth(context.Context, string, *usecase.Operator) (*user.User, error) + SearchUser(context.Context, string, *usecase.Operator) (*user.User, error) + DeleteMe(context.Context, id.UserID, *usecase.Operator) error +} diff --git a/server/internal/usecase/operator.go b/server/internal/usecase/operator.go new file mode 100644 index 000000000..5da9a52a4 --- /dev/null +++ b/server/internal/usecase/operator.go @@ -0,0 +1,93 @@ +package usecase + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" + 
"github.com/reearth/reearth-backend/pkg/user" +) + +type Operator struct { + User user.ID + ReadableTeams user.TeamIDList + WritableTeams user.TeamIDList + OwningTeams user.TeamIDList + ReadableScenes scene.IDList + WritableScenes scene.IDList + OwningScenes scene.IDList +} + +func (o *Operator) Teams(r user.Role) user.TeamIDList { + if o == nil { + return nil + } + if r == user.RoleReader { + return o.ReadableTeams + } + if r == user.RoleWriter { + return o.WritableTeams + } + if r == user.RoleOwner { + return o.OwningTeams + } + return nil +} + +func (o *Operator) AllReadableTeams() user.TeamIDList { + return append(o.ReadableTeams, o.AllWritableTeams()...) +} + +func (o *Operator) AllWritableTeams() user.TeamIDList { + return append(o.WritableTeams, o.AllOwningTeams()...) +} + +func (o *Operator) AllOwningTeams() user.TeamIDList { + return o.OwningTeams +} + +func (o *Operator) IsReadableTeam(team ...id.TeamID) bool { + return o.AllReadableTeams().Intersect(team).Len() > 0 +} + +func (o *Operator) IsWritableTeam(team ...id.TeamID) bool { + return o.AllWritableTeams().Intersect(team).Len() > 0 +} + +func (o *Operator) IsOwningTeam(team ...id.TeamID) bool { + return o.AllOwningTeams().Intersect(team).Len() > 0 +} + +func (o *Operator) AllReadableScenes() scene.IDList { + return append(o.ReadableScenes, o.AllWritableScenes()...) +} + +func (o *Operator) AllWritableScenes() scene.IDList { + return append(o.WritableScenes, o.AllOwningScenes()...) +} + +func (o *Operator) AllOwningScenes() scene.IDList { + return o.OwningScenes +} + +func (o *Operator) IsReadableScene(scene ...id.SceneID) bool { + return o.AllReadableScenes().Has(scene...) +} + +func (o *Operator) IsWritableScene(scene ...id.SceneID) bool { + return o.AllWritableScenes().Has(scene...) +} + +func (o *Operator) IsOwningScene(scene ...id.SceneID) bool { + return o.AllOwningScenes().Has(scene...) 
+} + +func (o *Operator) AddNewTeam(team id.TeamID) { + o.OwningTeams = append(o.OwningTeams, team) +} + +func (o *Operator) AddNewScene(team id.TeamID, scene id.SceneID) { + if o.IsOwningTeam(team) { + o.OwningScenes = append(o.OwningScenes, scene) + } else if o.IsWritableTeam(team) { + o.WritableScenes = append(o.WritableScenes, scene) + } +} diff --git a/server/internal/usecase/pageinfo.go b/server/internal/usecase/pageinfo.go new file mode 100644 index 000000000..53fa8f5b6 --- /dev/null +++ b/server/internal/usecase/pageinfo.go @@ -0,0 +1,73 @@ +package usecase + +type PageInfo struct { + totalCount int + startCursor *Cursor + endCursor *Cursor + hasNextPage bool + hasPreviousPage bool +} + +func NewPageInfo(totalCount int, startCursor *Cursor, endCursor *Cursor, hasNextPage bool, hasPreviousPage bool) *PageInfo { + var sc Cursor + var ec Cursor + if startCursor != nil { + sc = *startCursor + } + if endCursor != nil { + ec = *endCursor + } + + return &PageInfo{ + totalCount: totalCount, + startCursor: &sc, + endCursor: &ec, + hasNextPage: hasNextPage, + hasPreviousPage: hasPreviousPage, + } +} + +func EmptyPageInfo() *PageInfo { + return &PageInfo{ + totalCount: 0, + startCursor: nil, + endCursor: nil, + hasNextPage: false, + hasPreviousPage: false, + } +} + +func (p *PageInfo) TotalCount() int { + if p == nil { + return 0 + } + return p.totalCount +} + +func (p *PageInfo) StartCursor() *Cursor { + if p == nil { + return nil + } + return p.startCursor +} + +func (p *PageInfo) EndCursor() *Cursor { + if p == nil { + return nil + } + return p.endCursor +} + +func (p *PageInfo) HasNextPage() bool { + if p == nil { + return false + } + return p.hasNextPage +} + +func (p *PageInfo) HasPreviousPage() bool { + if p == nil { + return false + } + return p.hasPreviousPage +} diff --git a/server/internal/usecase/pagination.go b/server/internal/usecase/pagination.go new file mode 100644 index 000000000..e6ec0645b --- /dev/null +++ b/server/internal/usecase/pagination.go @@ 
-0,0 +1,21 @@ +package usecase + +type Cursor string + +type Pagination struct { + Before *Cursor + After *Cursor + First *int + Last *int +} + +func NewPagination(first *int, last *int, before *Cursor, after *Cursor) *Pagination { + // Relay-Style Cursor Pagination + // ref: https://www.apollographql.com/docs/react/features/pagination/#relay-style-cursor-pagination + return &Pagination{ + Before: before, + After: after, + First: first, + Last: last, + } +} diff --git a/server/internal/usecase/repo/asset.go b/server/internal/usecase/repo/asset.go new file mode 100644 index 000000000..a01e5016d --- /dev/null +++ b/server/internal/usecase/repo/asset.go @@ -0,0 +1,24 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/asset" + "github.com/reearth/reearth-backend/pkg/id" +) + +type AssetFilter struct { + Sort *asset.SortType + Keyword *string + Pagination *usecase.Pagination +} + +type Asset interface { + Filtered(TeamFilter) Asset + FindByTeam(context.Context, id.TeamID, AssetFilter) ([]*asset.Asset, *usecase.PageInfo, error) + FindByID(context.Context, id.AssetID) (*asset.Asset, error) + FindByIDs(context.Context, id.AssetIDList) ([]*asset.Asset, error) + Save(context.Context, *asset.Asset) error + Remove(context.Context, id.AssetID) error +} diff --git a/server/internal/usecase/repo/auth_request.go b/server/internal/usecase/repo/auth_request.go new file mode 100644 index 000000000..378926bb2 --- /dev/null +++ b/server/internal/usecase/repo/auth_request.go @@ -0,0 +1,16 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/auth" + "github.com/reearth/reearth-backend/pkg/id" +) + +type AuthRequest interface { + FindByID(context.Context, id.AuthRequestID) (*auth.Request, error) + FindByCode(context.Context, string) (*auth.Request, error) + FindBySubject(context.Context, string) (*auth.Request, error) + Save(context.Context, *auth.Request) error + 
Remove(context.Context, id.AuthRequestID) error +} diff --git a/server/internal/usecase/repo/config.go b/server/internal/usecase/repo/config.go new file mode 100644 index 000000000..fed54def3 --- /dev/null +++ b/server/internal/usecase/repo/config.go @@ -0,0 +1,14 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/config" +) + +type Config interface { + LockAndLoad(context.Context) (*config.Config, error) + Save(context.Context, *config.Config) error + SaveAndUnlock(context.Context, *config.Config) error + Unlock(context.Context) error +} diff --git a/server/internal/usecase/repo/container.go b/server/internal/usecase/repo/container.go new file mode 100644 index 000000000..7f42d17ce --- /dev/null +++ b/server/internal/usecase/repo/container.go @@ -0,0 +1,156 @@ +package repo + +import ( + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/user" +) + +var ( + ErrOperationDenied = errors.New("operation denied") +) + +type Container struct { + Asset Asset + AuthRequest AuthRequest + Config Config + DatasetSchema DatasetSchema + Dataset Dataset + Layer Layer + Lock Lock + Plugin Plugin + Project Project + PropertySchema PropertySchema + Property Property + Scene Scene + SceneLock SceneLock + Tag Tag + Team Team + Transaction Transaction + User User +} + +func (c *Container) Filtered(team TeamFilter, scene SceneFilter) *Container { + if c == nil { + return c + } + return &Container{ + Asset: c.Asset.Filtered(team), + AuthRequest: c.AuthRequest, + Config: c.Config, + DatasetSchema: c.DatasetSchema.Filtered(scene), + Dataset: c.Dataset.Filtered(scene), + Layer: c.Layer.Filtered(scene), + Lock: c.Lock, + Plugin: c.Plugin.Filtered(scene), + Project: c.Project.Filtered(team), + PropertySchema: c.PropertySchema.Filtered(scene), + Property: c.Property.Filtered(scene), + Scene: c.Scene.Filtered(team), + SceneLock: c.SceneLock, + Tag: 
c.Tag.Filtered(scene), + Team: c.Team, + Transaction: c.Transaction, + User: c.User, + } +} + +type TeamFilter struct { + Readable user.TeamIDList + Writable user.TeamIDList +} + +func TeamFilterFromOperator(o *usecase.Operator) TeamFilter { + return TeamFilter{ + Readable: o.AllReadableTeams(), + Writable: o.AllWritableTeams(), + } +} + +func (f TeamFilter) Clone() TeamFilter { + return TeamFilter{ + Readable: f.Readable.Clone(), + Writable: f.Writable.Clone(), + } +} + +func (f TeamFilter) Merge(g TeamFilter) TeamFilter { + var r, w user.TeamIDList + if f.Readable != nil || g.Readable != nil { + if f.Readable == nil { + r = append(g.Readable[:0:0], g.Readable...) + } else { + r = append(f.Readable, g.Readable...) + } + } + if f.Writable != nil || g.Writable != nil { + if f.Writable == nil { + w = append(g.Writable[:0:0], g.Writable...) + } else { + w = append(f.Writable, g.Writable...) + } + } + return TeamFilter{ + Readable: r, + Writable: w, + } +} + +func (f TeamFilter) CanRead(id user.TeamID) bool { + return f.Readable == nil || f.Readable.Has(id) +} + +func (f TeamFilter) CanWrite(id user.TeamID) bool { + return f.Writable == nil || f.Writable.Has(id) +} + +type SceneFilter struct { + Readable scene.IDList + Writable scene.IDList +} + +func SceneFilterFromOperator(o *usecase.Operator) SceneFilter { + return SceneFilter{ + Readable: o.AllReadableScenes(), + Writable: o.AllWritableScenes(), + } +} + +func (f SceneFilter) Merge(g SceneFilter) SceneFilter { + var r, w scene.IDList + if f.Readable != nil || g.Readable != nil { + if f.Readable == nil { + r = append(g.Readable[:0:0], g.Readable...) + } else { + r = append(f.Readable, g.Readable...) + } + } + if f.Writable != nil || g.Writable != nil { + if f.Writable == nil { + w = append(g.Writable[:0:0], g.Writable...) + } else { + w = append(f.Writable, g.Writable...) 
+ } + } + return SceneFilter{ + Readable: r, + Writable: w, + } +} + +func (f SceneFilter) Clone() SceneFilter { + return SceneFilter{ + Readable: f.Readable.Clone(), + Writable: f.Writable.Clone(), + } +} + +func (f SceneFilter) CanRead(id scene.ID) bool { + return f.Readable == nil || f.Readable.Has(id) +} + +func (f SceneFilter) CanWrite(id scene.ID) bool { + return f.Writable == nil || f.Writable.Has(id) +} diff --git a/server/internal/usecase/repo/container_test.go b/server/internal/usecase/repo/container_test.go new file mode 100644 index 000000000..6b60ce97d --- /dev/null +++ b/server/internal/usecase/repo/container_test.go @@ -0,0 +1,47 @@ +package repo + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/user" + "github.com/stretchr/testify/assert" +) + +func TestTeamFilter_Merge(t *testing.T) { + a := user.NewTeamID() + b := user.NewTeamID() + assert.Equal(t, TeamFilter{ + Readable: user.TeamIDList{a, b}, + Writable: user.TeamIDList{b, a}, + }, TeamFilter{ + Readable: user.TeamIDList{a}, + Writable: user.TeamIDList{b}, + }.Merge(TeamFilter{ + Readable: user.TeamIDList{b}, + Writable: user.TeamIDList{a}, + })) + assert.Equal(t, TeamFilter{Readable: user.TeamIDList{}}, TeamFilter{}.Merge(TeamFilter{Readable: user.TeamIDList{}})) + assert.Equal(t, TeamFilter{Readable: user.TeamIDList{}}, TeamFilter{Readable: user.TeamIDList{}}.Merge(TeamFilter{})) + assert.Equal(t, TeamFilter{Writable: user.TeamIDList{}}, TeamFilter{}.Merge(TeamFilter{Writable: user.TeamIDList{}})) + assert.Equal(t, TeamFilter{Writable: user.TeamIDList{}}, TeamFilter{Writable: user.TeamIDList{}}.Merge(TeamFilter{})) +} + +func TestSceneFilter_Merge(t *testing.T) { + a := scene.NewID() + b := scene.NewID() + assert.Equal(t, SceneFilter{ + Readable: scene.IDList{a, b}, + Writable: scene.IDList{b, a}, + }, SceneFilter{ + Readable: scene.IDList{a}, + Writable: scene.IDList{b}, + }.Merge(SceneFilter{ + Readable: scene.IDList{b}, + 
Writable: scene.IDList{a}, + })) + assert.Equal(t, SceneFilter{Readable: scene.IDList{}}, SceneFilter{}.Merge(SceneFilter{Readable: scene.IDList{}})) + assert.Equal(t, SceneFilter{Readable: scene.IDList{}}, SceneFilter{Readable: scene.IDList{}}.Merge(SceneFilter{})) + assert.Equal(t, SceneFilter{Writable: scene.IDList{}}, SceneFilter{}.Merge(SceneFilter{Writable: scene.IDList{}})) + assert.Equal(t, SceneFilter{Writable: scene.IDList{}}, SceneFilter{Writable: scene.IDList{}}.Merge(SceneFilter{})) +} diff --git a/server/internal/usecase/repo/dataset.go b/server/internal/usecase/repo/dataset.go new file mode 100644 index 000000000..afcc41f0b --- /dev/null +++ b/server/internal/usecase/repo/dataset.go @@ -0,0 +1,52 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" +) + +type Dataset interface { + Filtered(SceneFilter) Dataset + FindByID(context.Context, id.DatasetID) (*dataset.Dataset, error) + FindByIDs(context.Context, id.DatasetIDList) (dataset.List, error) + FindBySchema(context.Context, id.DatasetSchemaID, *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) + CountBySchema(context.Context, id.DatasetSchemaID) (int, error) + FindBySchemaAll(context.Context, id.DatasetSchemaID) (dataset.List, error) + FindGraph(context.Context, id.DatasetID, id.DatasetFieldIDList) (dataset.List, error) + Save(context.Context, *dataset.Dataset) error + SaveAll(context.Context, dataset.List) error + Remove(context.Context, id.DatasetID) error + RemoveAll(context.Context, id.DatasetIDList) error + RemoveByScene(context.Context, id.SceneID) error +} + +func DatasetLoaderFrom(r Dataset) dataset.Loader { + return func(ctx context.Context, ids ...id.DatasetID) (dataset.List, error) { + return r.FindByIDs(ctx, ids) + } +} + +func DatasetGraphLoaderFrom(r Dataset) dataset.GraphLoader { + return func(ctx context.Context, root id.DatasetID, 
fields ...id.DatasetFieldID) (dataset.List, *dataset.Field, error) { + if len(fields) <= 1 { + d, err := r.FindByID(ctx, root) + if err != nil { + return nil, nil, err + } + var field *dataset.Field + if len(fields) == 1 { + field = d.Field(fields[0]) + } + return dataset.List{d}, field, nil + } + + list2, err := r.FindGraph(ctx, root, fields) + if err != nil { + return nil, nil, err + } + return list2, list2.Last().Field(fields[len(fields)-1]), nil + } +} diff --git a/server/internal/usecase/repo/dataset_schema.go b/server/internal/usecase/repo/dataset_schema.go new file mode 100644 index 000000000..fe99bfb64 --- /dev/null +++ b/server/internal/usecase/repo/dataset_schema.go @@ -0,0 +1,25 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" +) + +type DatasetSchema interface { + Filtered(SceneFilter) DatasetSchema + FindByID(context.Context, id.DatasetSchemaID) (*dataset.Schema, error) + FindByIDs(context.Context, id.DatasetSchemaIDList) (dataset.SchemaList, error) + FindByScene(context.Context, id.SceneID, *usecase.Pagination) (dataset.SchemaList, *usecase.PageInfo, error) + FindBySceneAll(context.Context, id.SceneID) (dataset.SchemaList, error) + FindBySceneAndSource(context.Context, id.SceneID, string) (dataset.SchemaList, error) + FindDynamicByID(context.Context, id.DatasetSchemaID) (*dataset.Schema, error) + FindAllDynamicByScene(context.Context, id.SceneID) (dataset.SchemaList, error) + Save(context.Context, *dataset.Schema) error + SaveAll(context.Context, dataset.SchemaList) error + Remove(context.Context, id.DatasetSchemaID) error + RemoveAll(context.Context, id.DatasetSchemaIDList) error + RemoveByScene(context.Context, id.SceneID) error +} diff --git a/server/internal/usecase/repo/layer.go b/server/internal/usecase/repo/layer.go new file mode 100644 index 000000000..46870661f --- /dev/null +++ 
b/server/internal/usecase/repo/layer.go @@ -0,0 +1,45 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" +) + +type Layer interface { + Filtered(SceneFilter) Layer + FindByID(context.Context, id.LayerID) (layer.Layer, error) + FindByIDs(context.Context, id.LayerIDList) (layer.List, error) + FindItemByID(context.Context, id.LayerID) (*layer.Item, error) + FindItemByIDs(context.Context, id.LayerIDList) (layer.ItemList, error) + FindAllByDatasetSchema(context.Context, id.DatasetSchemaID) (layer.List, error) + FindGroupByID(context.Context, id.LayerID) (*layer.Group, error) + FindGroupByIDs(context.Context, id.LayerIDList) (layer.GroupList, error) + FindGroupBySceneAndLinkedDatasetSchema(context.Context, id.SceneID, id.DatasetSchemaID) (layer.GroupList, error) + FindParentByID(context.Context, id.LayerID) (*layer.Group, error) + FindParentsByIDs(context.Context, id.LayerIDList) (layer.GroupList, error) + FindByPluginAndExtension(context.Context, id.PluginID, *id.PluginExtensionID) (layer.List, error) + FindByPluginAndExtensionOfBlocks(context.Context, id.PluginID, *id.PluginExtensionID) (layer.List, error) + FindByProperty(context.Context, id.PropertyID) (layer.Layer, error) + FindByScene(context.Context, id.SceneID) (layer.List, error) + FindByTag(context.Context, id.TagID) (layer.List, error) + Save(context.Context, layer.Layer) error + SaveAll(context.Context, layer.List) error + UpdatePlugin(context.Context, id.PluginID, id.PluginID) error + Remove(context.Context, id.LayerID) error + RemoveAll(context.Context, id.LayerIDList) error + RemoveByScene(context.Context, id.SceneID) error +} + +func LayerLoaderFrom(r Layer) layer.Loader { + return func(ctx context.Context, ids ...id.LayerID) (layer.List, error) { + return r.FindByIDs(ctx, ids) + } +} + +func LayerLoaderBySceneFrom(r Layer) layer.LoaderByScene { + return func(ctx context.Context, s id.SceneID) (layer.List, error) { + 
return r.FindByScene(ctx, s) + } +} diff --git a/server/internal/usecase/repo/lock.go b/server/internal/usecase/repo/lock.go new file mode 100644 index 000000000..645ae1445 --- /dev/null +++ b/server/internal/usecase/repo/lock.go @@ -0,0 +1,17 @@ +package repo + +import ( + "context" + "errors" +) + +var ( + ErrFailedToLock = errors.New("failed to lock") + ErrAlreadyLocked = errors.New("already locked") + ErrNotLocked = errors.New("not locked") +) + +type Lock interface { + Lock(context.Context, string) error + Unlock(context.Context, string) error +} diff --git a/server/internal/usecase/repo/plugin.go b/server/internal/usecase/repo/plugin.go new file mode 100644 index 000000000..f30d2f8dc --- /dev/null +++ b/server/internal/usecase/repo/plugin.go @@ -0,0 +1,22 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" +) + +type Plugin interface { + Filtered(SceneFilter) Plugin + FindByID(context.Context, id.PluginID) (*plugin.Plugin, error) + FindByIDs(context.Context, []id.PluginID) ([]*plugin.Plugin, error) + Save(context.Context, *plugin.Plugin) error + Remove(context.Context, id.PluginID) error +} + +func PluginLoaderFrom(r Plugin) plugin.Loader { + return func(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { + return r.FindByIDs(ctx, ids) + } +} diff --git a/server/internal/usecase/repo/project.go b/server/internal/usecase/repo/project.go new file mode 100644 index 000000000..f65a1b6d8 --- /dev/null +++ b/server/internal/usecase/repo/project.go @@ -0,0 +1,47 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/project" +) + +type Project interface { + Filtered(TeamFilter) Project + FindByIDs(context.Context, id.ProjectIDList) ([]*project.Project, error) + FindByID(context.Context, id.ProjectID) (*project.Project, error) + 
FindByTeam(context.Context, id.TeamID, *usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) + FindByPublicName(context.Context, string) (*project.Project, error) + CountByTeam(context.Context, id.TeamID) (int, error) + Save(context.Context, *project.Project) error + Remove(context.Context, id.ProjectID) error +} + +func IterateProjectsByTeam(repo Project, ctx context.Context, tid id.TeamID, batch int, callback func([]*project.Project) error) error { + pagination := usecase.NewPagination(&batch, nil, nil, nil) + + for { + projects, info, err := repo.FindByTeam(ctx, tid, pagination) + if err != nil { + return err + } + if len(projects) == 0 { + break + } + + if err := callback(projects); err != nil { + return err + } + + if !info.HasNextPage() { + break + } + + c := usecase.Cursor(projects[len(projects)-1].ID().String()) + pagination.After = &c + } + + return nil +} diff --git a/server/internal/usecase/repo/property.go b/server/internal/usecase/repo/property.go new file mode 100644 index 000000000..c614db033 --- /dev/null +++ b/server/internal/usecase/repo/property.go @@ -0,0 +1,30 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Property interface { + Filtered(SceneFilter) Property + FindByID(context.Context, id.PropertyID) (*property.Property, error) + FindByIDs(context.Context, id.PropertyIDList) (property.List, error) + FindLinkedAll(context.Context, id.SceneID) (property.List, error) + FindByDataset(context.Context, id.DatasetSchemaID, id.DatasetID) (property.List, error) + FindBySchema(context.Context, []id.PropertySchemaID, id.SceneID) (property.List, error) + FindByPlugin(context.Context, id.PluginID, id.SceneID) (property.List, error) + Save(context.Context, *property.Property) error + SaveAll(context.Context, property.List) error + UpdateSchemaPlugin(context.Context, id.PluginID, id.PluginID, id.SceneID) error + Remove(context.Context, 
id.PropertyID) error + RemoveAll(context.Context, id.PropertyIDList) error + RemoveByScene(context.Context, id.SceneID) error +} + +func PropertyLoaderFrom(r Property) property.Loader { + return func(ctx context.Context, ids ...id.PropertyID) (property.List, error) { + return r.FindByIDs(ctx, ids) + } +} diff --git a/server/internal/usecase/repo/property_schema.go b/server/internal/usecase/repo/property_schema.go new file mode 100644 index 000000000..6911af050 --- /dev/null +++ b/server/internal/usecase/repo/property_schema.go @@ -0,0 +1,24 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +type PropertySchema interface { + Filtered(SceneFilter) PropertySchema + FindByID(context.Context, id.PropertySchemaID) (*property.Schema, error) + FindByIDs(context.Context, []id.PropertySchemaID) (property.SchemaList, error) + Save(context.Context, *property.Schema) error + SaveAll(context.Context, property.SchemaList) error + Remove(context.Context, id.PropertySchemaID) error + RemoveAll(context.Context, []id.PropertySchemaID) error +} + +func PropertySchemaLoaderFrom(r PropertySchema) property.SchemaLoader { + return func(ctx context.Context, ids ...id.PropertySchemaID) (property.SchemaList, error) { + return r.FindByIDs(ctx, ids) + } +} diff --git a/server/internal/usecase/repo/scene.go b/server/internal/usecase/repo/scene.go new file mode 100644 index 000000000..236f27708 --- /dev/null +++ b/server/internal/usecase/repo/scene.go @@ -0,0 +1,18 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type Scene interface { + Filtered(TeamFilter) Scene + FindByID(context.Context, id.SceneID) (*scene.Scene, error) + FindByIDs(context.Context, id.SceneIDList) (scene.List, error) + FindByTeam(context.Context, ...id.TeamID) (scene.List, error) + FindByProject(context.Context, id.ProjectID) 
(*scene.Scene, error) + Save(context.Context, *scene.Scene) error + Remove(context.Context, id.SceneID) error +} diff --git a/server/internal/usecase/repo/scene_lock.go b/server/internal/usecase/repo/scene_lock.go new file mode 100644 index 000000000..b8397a68b --- /dev/null +++ b/server/internal/usecase/repo/scene_lock.go @@ -0,0 +1,15 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type SceneLock interface { + GetLock(context.Context, id.SceneID) (scene.LockMode, error) + GetAllLock(context.Context, id.SceneIDList) ([]scene.LockMode, error) + SaveLock(context.Context, id.SceneID, scene.LockMode) error + ReleaseAllLock(context.Context) error +} diff --git a/server/internal/usecase/repo/tag.go b/server/internal/usecase/repo/tag.go new file mode 100644 index 000000000..343a283ce --- /dev/null +++ b/server/internal/usecase/repo/tag.go @@ -0,0 +1,48 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/tag" +) + +type Tag interface { + Filtered(SceneFilter) Tag + FindByID(context.Context, id.TagID) (tag.Tag, error) + FindByIDs(context.Context, id.TagIDList) ([]*tag.Tag, error) + FindByScene(context.Context, id.SceneID) ([]*tag.Tag, error) + FindItemByID(context.Context, id.TagID) (*tag.Item, error) + FindItemByIDs(context.Context, id.TagIDList) ([]*tag.Item, error) + FindGroupByID(context.Context, id.TagID) (*tag.Group, error) + FindGroupByIDs(context.Context, id.TagIDList) ([]*tag.Group, error) + FindRootsByScene(context.Context, id.SceneID) ([]*tag.Tag, error) + FindGroupByItem(context.Context, id.TagID) (*tag.Group, error) + Save(context.Context, tag.Tag) error + SaveAll(context.Context, []*tag.Tag) error + Remove(context.Context, id.TagID) error + RemoveAll(context.Context, id.TagIDList) error + RemoveByScene(context.Context, id.SceneID) error +} + +func TagLoaderFrom(r Tag) tag.Loader { + 
return func(ctx context.Context, ids ...id.TagID) ([]*tag.Tag, error) { + return r.FindByIDs(ctx, ids) + } +} + +func TagSceneLoaderFrom(r Tag, scenes []id.SceneID) tag.SceneLoader { + return func(ctx context.Context, id id.SceneID) ([]*tag.Tag, error) { + found := false + for _, s := range scenes { + if id == s { + found = true + break + } + } + if !found { + return nil, nil + } + return r.FindByScene(ctx, id) + } +} diff --git a/server/internal/usecase/repo/team.go b/server/internal/usecase/repo/team.go new file mode 100644 index 000000000..8ac266224 --- /dev/null +++ b/server/internal/usecase/repo/team.go @@ -0,0 +1,18 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" +) + +type Team interface { + FindByUser(context.Context, id.UserID) (user.TeamList, error) + FindByIDs(context.Context, id.TeamIDList) (user.TeamList, error) + FindByID(context.Context, id.TeamID) (*user.Team, error) + Save(context.Context, *user.Team) error + SaveAll(context.Context, []*user.Team) error + Remove(context.Context, id.TeamID) error + RemoveAll(context.Context, id.TeamIDList) error +} diff --git a/server/internal/usecase/repo/transaction.go b/server/internal/usecase/repo/transaction.go new file mode 100644 index 000000000..6627b24d6 --- /dev/null +++ b/server/internal/usecase/repo/transaction.go @@ -0,0 +1,17 @@ +package repo + +import "context" + +type Transaction interface { + Begin() (Tx, error) +} + +type Tx interface { + // Commit informs Tx to commit when End() is called. + // If this was not called once, rollback is done when End() is called. + Commit() + // End finishes the transaction and commits if Commit() was called once, or else rolls back. + // This method is supposed to be called in the usecase layer using defer. 
+ End(context.Context) error + IsCommitted() bool +} diff --git a/server/internal/usecase/repo/user.go b/server/internal/usecase/repo/user.go new file mode 100644 index 000000000..9ebb545be --- /dev/null +++ b/server/internal/usecase/repo/user.go @@ -0,0 +1,21 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" +) + +type User interface { + FindByIDs(context.Context, id.UserIDList) ([]*user.User, error) + FindByID(context.Context, id.UserID) (*user.User, error) + FindByAuth0Sub(context.Context, string) (*user.User, error) + FindByEmail(context.Context, string) (*user.User, error) + FindByName(context.Context, string) (*user.User, error) + FindByNameOrEmail(context.Context, string) (*user.User, error) + FindByVerification(context.Context, string) (*user.User, error) + FindByPasswordResetRequest(context.Context, string) (*user.User, error) + Save(context.Context, *user.User) error + Remove(context.Context, id.UserID) error +} diff --git a/server/main.go b/server/main.go new file mode 100644 index 000000000..38dd16da6 --- /dev/null +++ b/server/main.go @@ -0,0 +1,3 @@ +package main + +func main() {} diff --git a/server/pkg/asset/asset.go b/server/pkg/asset/asset.go new file mode 100644 index 000000000..2f04f913a --- /dev/null +++ b/server/pkg/asset/asset.go @@ -0,0 +1,53 @@ +package asset + +import ( + "errors" + "time" +) + +var ( + ErrEmptyTeamID = errors.New("require team id") + ErrEmptyURL = errors.New("require valid url") + ErrEmptySize = errors.New("file size cannot be zero") +) + +type Asset struct { + id ID + createdAt time.Time + team TeamID + name string // file name + size int64 // file size + url string + contentType string +} + +func (a *Asset) ID() ID { + return a.id +} + +func (a *Asset) Team() TeamID { + return a.team +} + +func (a *Asset) Name() string { + return a.name +} + +func (a *Asset) Size() int64 { + return a.size +} + +func (a *Asset) URL() string { + return 
a.url +} + +func (a *Asset) ContentType() string { + return a.contentType +} + +func (a *Asset) CreatedAt() time.Time { + if a == nil { + return time.Time{} + } + return a.id.Timestamp() +} diff --git a/server/pkg/asset/asset_test.go b/server/pkg/asset/asset_test.go new file mode 100644 index 000000000..3863f4c95 --- /dev/null +++ b/server/pkg/asset/asset_test.go @@ -0,0 +1,64 @@ +package asset + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestAsset(t *testing.T) { + aid := NewID() + tid := NewTeamID() + d := aid.Timestamp() + + tests := []struct { + Name string + Expected struct { + ID ID + CreatedAt time.Time + Team TeamID + Name string + Size int64 + Url string + ContentType string + } + Actual *Asset + }{ + { + Expected: struct { + ID ID + CreatedAt time.Time + Team TeamID + Name string + Size int64 + Url string + ContentType string + }{ + ID: aid, + CreatedAt: d, + Team: tid, + Size: 10, + Url: "tt://xxx.xx", + Name: "xxx", + ContentType: "test", + }, + Actual: New().ID(aid).CreatedAt(d).ContentType("test").Team(tid).Size(10).Name("xxx").URL("tt://xxx.xx").MustBuild(), + }, + } + + for _, tc := range tests { + tc := tc + + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected.ID, tc.Actual.ID()) + assert.Equal(t, tc.Expected.CreatedAt, tc.Actual.CreatedAt()) + assert.Equal(t, tc.Expected.Team, tc.Actual.Team()) + assert.Equal(t, tc.Expected.Url, tc.Actual.URL()) + assert.Equal(t, tc.Expected.Size, tc.Actual.Size()) + assert.Equal(t, tc.Expected.Name, tc.Actual.Name()) + assert.Equal(t, tc.Expected.ContentType, tc.Actual.ContentType()) + }) + } +} diff --git a/server/pkg/asset/builder.go b/server/pkg/asset/builder.go new file mode 100644 index 000000000..48f88e4ef --- /dev/null +++ b/server/pkg/asset/builder.go @@ -0,0 +1,80 @@ +package asset + +import ( + "time" +) + +type Builder struct { + a *Asset +} + +func New() *Builder { + return &Builder{a: &Asset{}} +} + +func (b *Builder) Build() 
(*Asset, error) { + if b.a.id.IsNil() { + return nil, ErrInvalidID + } + if b.a.team.IsNil() { + return nil, ErrEmptyTeamID + } + if b.a.url == "" { + return nil, ErrEmptyURL + } + if b.a.size <= 0 { + return nil, ErrEmptySize + } + if b.a.createdAt.IsZero() { + b.a.createdAt = b.a.CreatedAt() + } + return b.a, nil +} + +func (b *Builder) MustBuild() *Asset { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +func (b *Builder) ID(id ID) *Builder { + b.a.id = id + return b +} + +func (b *Builder) NewID() *Builder { + b.a.id = NewID() + return b +} + +func (b *Builder) Team(team TeamID) *Builder { + b.a.team = team + return b +} + +func (b *Builder) Name(name string) *Builder { + b.a.name = name + return b +} + +func (b *Builder) Size(size int64) *Builder { + b.a.size = size + return b +} + +func (b *Builder) URL(url string) *Builder { + b.a.url = url + return b +} + +func (b *Builder) ContentType(contentType string) *Builder { + b.a.contentType = contentType + return b +} + +func (b *Builder) CreatedAt(createdAt time.Time) *Builder { + b.a.createdAt = createdAt + return b +} diff --git a/server/pkg/asset/builder_test.go b/server/pkg/asset/builder_test.go new file mode 100644 index 000000000..d0523dd0d --- /dev/null +++ b/server/pkg/asset/builder_test.go @@ -0,0 +1,248 @@ +package asset + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestBuilder_Build(t *testing.T) { + aid := NewID() + tid := NewTeamID() + d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) + + type args struct { + id ID + name string + createdAt time.Time + team TeamID + size int64 + url string + contentType string + } + + tests := []struct { + name string + args args + expected *Asset + err error + }{ + { + name: "Valid asset", + args: args{ + createdAt: d, + id: aid, + team: tid, + name: "xxx", + size: 10, + url: "tt://xxx.zz", + contentType: "bbb", + }, + expected: &Asset{ + id: aid, + createdAt: d, + team: tid, + size: 10, + name: "xxx", 
+ url: "tt://xxx.zz", + contentType: "bbb", + }, + }, + { + name: "failed empty size", + args: args{ + id: NewID(), + createdAt: d, + team: NewTeamID(), + size: 0, + url: "tt://xxx.zz", + contentType: "bbb", + }, + err: ErrEmptySize, + }, + { + name: "failed empty url", + args: args{ + id: NewID(), + createdAt: d, + team: NewTeamID(), + size: 10, + url: "", + contentType: "bbb", + }, + err: ErrEmptyURL, + }, + { + name: "failed empty team", + args: args{ + id: NewID(), + createdAt: d, + team: TeamID{}, + size: 10, + url: "tt://xxx.zz", + contentType: "bbb", + }, + err: ErrEmptyTeamID, + }, + { + name: "failed invalid Id", + args: args{ + id: ID{}, + createdAt: d, + team: NewTeamID(), + size: 10, + url: "tt://xxx.zz", + contentType: "bbb", + }, + err: ErrInvalidID, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res, err := New(). + ID(tt.args.id). + CreatedAt(tt.args.createdAt). + Name(tt.args.name). + Size(tt.args.size). + Team(tt.args.team). + ContentType(tt.args.contentType). + URL(tt.args.url). 
+ Build() + if tt.err == nil { + assert.Equal(t, tt.expected, res) + assert.Nil(t, err) + } else { + assert.Nil(t, res) + assert.Equal(t, tt.err, err) + } + }) + } +} + +func TestBuilder_MustBuild(t *testing.T) { + aid := NewID() + tid := NewTeamID() + d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) + + type args struct { + id ID + name string + createdAt time.Time + team TeamID + size int64 + url string + contentType string + } + + tests := []struct { + name string + args args + expected *Asset + err error + }{ + { + name: "Valid asset", + args: args{ + createdAt: d, + id: aid, + team: tid, + name: "xxx", + size: 10, + url: "tt://xxx.zz", + contentType: "bbb", + }, + expected: &Asset{ + id: aid, + createdAt: d, + team: tid, + size: 10, + name: "xxx", + url: "tt://xxx.zz", + contentType: "bbb", + }, + }, + { + name: "failed empty size", + args: args{ + createdAt: d, + id: NewID(), + team: NewTeamID(), + size: 0, + url: "tt://xxx.zz", + contentType: "bbb", + }, + err: ErrEmptySize, + }, + { + name: "failed empty url", + args: args{ + createdAt: d, + id: NewID(), + team: NewTeamID(), + size: 10, + url: "", + contentType: "bbb", + }, + err: ErrEmptyURL, + }, + { + name: "failed empty team", + args: args{ + createdAt: d, + id: NewID(), + team: TeamID{}, + size: 10, + url: "tt://xxx.zz", + contentType: "bbb", + }, + err: ErrEmptyTeamID, + }, + { + name: "failed invalid Id", + args: args{ + createdAt: d, + id: ID{}, + team: NewTeamID(), + size: 10, + url: "tt://xxx.zz", + contentType: "bbb", + }, + err: ErrInvalidID, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + build := func() *Asset { + t.Helper() + return New(). + ID(tt.args.id). + CreatedAt(tt.args.createdAt). + Name(tt.args.name). + Size(tt.args.size). + Team(tt.args.team). + ContentType(tt.args.contentType). + URL(tt.args.url). 
+ MustBuild() + } + + if tt.err != nil { + assert.PanicsWithValue(t, tt.err, func() { _ = build() }) + } else { + assert.Equal(t, tt.expected, build()) + } + }) + } +} + +func TestNewID(t *testing.T) { + a := New().NewID().URL("tt://xxx.bb").Team(NewTeamID()).Size(10).MustBuild() + assert.False(t, a.id.IsNil()) +} diff --git a/server/pkg/asset/id.go b/server/pkg/asset/id.go new file mode 100644 index 000000000..8fb4f56cc --- /dev/null +++ b/server/pkg/asset/id.go @@ -0,0 +1,22 @@ +package asset + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +type ID = id.AssetID +type TeamID = id.TeamID + +var NewID = id.NewAssetID +var NewTeamID = id.NewTeamID + +var MustID = id.MustAssetID +var MustTeamID = id.MustTeamID + +var IDFrom = id.AssetIDFrom +var TeamIDFrom = id.TeamIDFrom + +var IDFromRef = id.AssetIDFromRef +var TeamIDFromRef = id.TeamIDFromRef + +var ErrInvalidID = id.ErrInvalidID diff --git a/server/pkg/asset/sort_type.go b/server/pkg/asset/sort_type.go new file mode 100644 index 000000000..ab44259a0 --- /dev/null +++ b/server/pkg/asset/sort_type.go @@ -0,0 +1,37 @@ +package asset + +import ( + "errors" + "strings" +) + +var ( + SortTypeID = SortType("id") + SortTypeName = SortType("name") + SortTypeSize = SortType("size") + + ErrInvalidSortType = errors.New("invalid sort type") +) + +type SortType string + +func check(role SortType) bool { + switch role { + case SortTypeID: + return true + case SortTypeName: + return true + case SortTypeSize: + return true + } + return false +} + +func SortTypeFromString(r string) (SortType, error) { + role := SortType(strings.ToLower(r)) + + if check(role) { + return role, nil + } + return role, ErrInvalidSortType +} diff --git a/server/pkg/asset/sort_type_test.go b/server/pkg/asset/sort_type_test.go new file mode 100644 index 000000000..4e5cf49c8 --- /dev/null +++ b/server/pkg/asset/sort_type_test.go @@ -0,0 +1,79 @@ +package asset + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func 
TestSortTypeFromString(t *testing.T) { + tests := []struct { + Name, Role string + Expected SortType + Err error + }{ + { + Name: "Success id", + Role: "id", + Expected: SortType("id"), + Err: nil, + }, + { + Name: "fail invalid sort type", + Role: "xxx", + Expected: SortType("xxx"), + Err: ErrInvalidSortType, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := SortTypeFromString(tt.Role) + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) + } else { + assert.Equal(t, tt.Err, err) + } + }) + } +} + +func TestCheck(t *testing.T) { + tests := []struct { + Name string + Input SortType + Expected bool + }{ + { + Name: "check id", + Input: SortType("id"), + Expected: true, + }, + { + Name: "check name", + Input: SortType("name"), + Expected: true, + }, + { + Name: "check size", + Input: SortType("size"), + Expected: true, + }, + { + Name: "check unknown sort type", + Input: SortType("xxx"), + Expected: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := check(tt.Input) + assert.Equal(t, tt.Expected, res) + }) + } +} diff --git a/server/pkg/auth/builder.go b/server/pkg/auth/builder.go new file mode 100644 index 000000000..a2de4e7e3 --- /dev/null +++ b/server/pkg/auth/builder.go @@ -0,0 +1,102 @@ +package auth + +import ( + "time" + + "github.com/caos/oidc/pkg/oidc" + "github.com/reearth/reearth-backend/pkg/id" +) + +type RequestBuilder struct { + r *Request +} + +func NewRequest() *RequestBuilder { + return &RequestBuilder{r: &Request{}} +} + +func (b *RequestBuilder) Build() (*Request, error) { + if b.r.id.IsNil() { + return nil, id.ErrInvalidID + } + b.r.createdAt = time.Now() + return b.r, nil +} + +func (b *RequestBuilder) MustBuild() *Request { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +func (b *RequestBuilder) ID(id id.AuthRequestID) *RequestBuilder { + b.r.id = id + return b +} + +func (b 
*RequestBuilder) NewID() *RequestBuilder { + b.r.id = id.NewAuthRequestID() + return b +} + +func (b *RequestBuilder) ClientID(id string) *RequestBuilder { + b.r.clientID = id + return b +} + +func (b *RequestBuilder) Subject(subject string) *RequestBuilder { + b.r.subject = subject + return b +} + +func (b *RequestBuilder) Code(code string) *RequestBuilder { + b.r.code = code + return b +} + +func (b *RequestBuilder) State(state string) *RequestBuilder { + b.r.state = state + return b +} + +func (b *RequestBuilder) ResponseType(rt oidc.ResponseType) *RequestBuilder { + b.r.responseType = rt + return b +} + +func (b *RequestBuilder) Scopes(scopes []string) *RequestBuilder { + b.r.scopes = scopes + return b +} + +func (b *RequestBuilder) Audiences(audiences []string) *RequestBuilder { + b.r.audiences = audiences + return b +} + +func (b *RequestBuilder) RedirectURI(redirectURI string) *RequestBuilder { + b.r.redirectURI = redirectURI + return b +} + +func (b *RequestBuilder) Nonce(nonce string) *RequestBuilder { + b.r.nonce = nonce + return b +} + +func (b *RequestBuilder) CodeChallenge(CodeChallenge *oidc.CodeChallenge) *RequestBuilder { + b.r.codeChallenge = CodeChallenge + return b +} + +func (b *RequestBuilder) CreatedAt(createdAt time.Time) *RequestBuilder { + b.r.createdAt = createdAt + return b +} + +func (b *RequestBuilder) AuthorizedAt(authorizedAt *time.Time) *RequestBuilder { + b.r.authorizedAt = authorizedAt + return b +} diff --git a/server/pkg/auth/client.go b/server/pkg/auth/client.go new file mode 100644 index 000000000..0c7f6b5b6 --- /dev/null +++ b/server/pkg/auth/client.go @@ -0,0 +1,117 @@ +package auth + +import ( + "fmt" + "time" + + "github.com/caos/oidc/pkg/oidc" + "github.com/caos/oidc/pkg/op" +) + +const ClientID = "reearth-authsrv-client-default" + +type Client struct { + id string + applicationType op.ApplicationType + authMethod oidc.AuthMethod + accessTokenType op.AccessTokenType + responseTypes []oidc.ResponseType + grantTypes 
[]oidc.GrantType + allowedScopes []string + redirectURIs []string + logoutRedirectURIs []string + loginURI string + idTokenLifetime time.Duration + clockSkew time.Duration + devMode bool +} + +func NewLocalClient(devMode bool, clientDomain string) op.Client { + return &Client{ + id: ClientID, + applicationType: op.ApplicationTypeWeb, + authMethod: oidc.AuthMethodNone, + accessTokenType: op.AccessTokenTypeJWT, + responseTypes: []oidc.ResponseType{oidc.ResponseTypeCode}, + grantTypes: []oidc.GrantType{oidc.GrantTypeCode, oidc.GrantTypeRefreshToken}, + redirectURIs: []string{clientDomain}, + allowedScopes: []string{"openid", "profile", "email"}, + loginURI: clientDomain + "/login?id=%s", + idTokenLifetime: 5 * time.Minute, + clockSkew: 0, + devMode: devMode, + } +} + +func (c *Client) GetID() string { + return c.id +} + +func (c *Client) RedirectURIs() []string { + return c.redirectURIs +} + +func (c *Client) PostLogoutRedirectURIs() []string { + return c.logoutRedirectURIs +} + +func (c *Client) LoginURL(id string) string { + return fmt.Sprintf(c.loginURI, id) +} + +func (c *Client) ApplicationType() op.ApplicationType { + return c.applicationType +} + +func (c *Client) AuthMethod() oidc.AuthMethod { + return c.authMethod +} + +func (c *Client) IDTokenLifetime() time.Duration { + return c.idTokenLifetime +} + +func (c *Client) AccessTokenType() op.AccessTokenType { + return c.accessTokenType +} + +func (c *Client) ResponseTypes() []oidc.ResponseType { + return c.responseTypes +} + +func (c *Client) GrantTypes() []oidc.GrantType { + return c.grantTypes +} + +func (c *Client) DevMode() bool { + return c.devMode +} + +func (c *Client) RestrictAdditionalIdTokenScopes() func(scopes []string) []string { + return func(scopes []string) []string { + return scopes + } +} + +func (c *Client) RestrictAdditionalAccessTokenScopes() func(scopes []string) []string { + return func(scopes []string) []string { + return scopes + } +} + +func (c *Client) IsScopeAllowed(scope string) bool 
{ + for _, clientScope := range c.allowedScopes { + if clientScope == scope { + return true + } + } + return false +} + +func (c *Client) IDTokenUserinfoClaimsAssertion() bool { + return false +} + +func (c *Client) ClockSkew() time.Duration { + return c.clockSkew +} diff --git a/server/pkg/auth/request.go b/server/pkg/auth/request.go new file mode 100644 index 000000000..c2645b3b0 --- /dev/null +++ b/server/pkg/auth/request.go @@ -0,0 +1,143 @@ +package auth + +import ( + "time" + + "github.com/caos/oidc/pkg/oidc" + "github.com/reearth/reearth-backend/pkg/id" +) + +var essentialScopes = []string{"openid", "profile", "email"} + +type Request struct { + id id.AuthRequestID + clientID string + subject string + code string + state string + responseType oidc.ResponseType + scopes []string + audiences []string + redirectURI string + nonce string + codeChallenge *oidc.CodeChallenge + createdAt time.Time + authorizedAt *time.Time +} + +func (a *Request) ID() id.AuthRequestID { + return a.id +} + +func (a *Request) GetID() string { + return a.id.String() +} + +func (a *Request) GetACR() string { + return "" +} + +func (a *Request) GetAMR() []string { + return []string{ + "password", + } +} + +func (a *Request) GetAudience() []string { + if a.audiences == nil { + return make([]string, 0) + } + + return a.audiences +} + +func (a *Request) GetAuthTime() time.Time { + return a.createdAt +} + +func (a *Request) GetClientID() string { + return a.clientID +} + +func (a *Request) GetResponseMode() oidc.ResponseMode { + // TODO make sure about this + return oidc.ResponseModeQuery +} + +func (a *Request) GetCode() string { + return a.code +} + +func (a *Request) GetState() string { + return a.state +} + +func (a *Request) GetCodeChallenge() *oidc.CodeChallenge { + return a.codeChallenge +} + +func (a *Request) GetNonce() string { + return a.nonce +} + +func (a *Request) GetRedirectURI() string { + return a.redirectURI +} + +func (a *Request) GetResponseType() oidc.ResponseType { + 
return a.responseType +} + +func (a *Request) GetScopes() []string { + return unique(append(a.scopes, essentialScopes...)) +} + +func (a *Request) SetCurrentScopes(scopes []string) { + a.scopes = unique(append(scopes, essentialScopes...)) +} + +func (a *Request) GetSubject() string { + return a.subject +} + +func (a *Request) CreatedAt() time.Time { + return a.createdAt +} + +func (a *Request) SetCreatedAt(createdAt time.Time) { + a.createdAt = createdAt +} + +func (a *Request) AuthorizedAt() *time.Time { + return a.authorizedAt +} + +func (a *Request) SetAuthorizedAt(authorizedAt *time.Time) { + a.authorizedAt = authorizedAt +} + +func (a *Request) Done() bool { + return a.authorizedAt != nil +} + +func (a *Request) Complete(sub string) { + a.subject = sub + now := time.Now() + a.authorizedAt = &now +} + +func (a *Request) SetCode(code string) { + a.code = code +} + +func unique(list []string) []string { + allKeys := make(map[string]struct{}) + var uniqueList []string + for _, item := range list { + if _, ok := allKeys[item]; !ok { + allKeys[item] = struct{}{} + uniqueList = append(uniqueList, item) + } + } + return uniqueList +} diff --git a/server/pkg/builtin/main.go b/server/pkg/builtin/main.go new file mode 100644 index 000000000..b2979386b --- /dev/null +++ b/server/pkg/builtin/main.go @@ -0,0 +1,64 @@ +package builtin + +import ( + _ "embed" + + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/plugin/manifest" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +//go:embed manifest.yml +var pluginManifestJSON []byte + +//go:embed manifest_ja.yml +var pluginManifestJSON_ja []byte + +var pluginTranslationList = manifest.TranslationMap{ + "ja": manifest.MustParseTranslationFromBytes(pluginManifestJSON_ja), +} +var pluginManifest = manifest.MustParseSystemFromBytes(pluginManifestJSON, nil, pluginTranslationList.TranslatedRef()) + +// MUST NOT CHANGE +var ( + 
PropertySchemaIDVisualizerCesium = property.MustSchemaID("reearth/cesium") + PropertySchemaIDInfobox = property.MustSchemaID("reearth/infobox") +) + +func GetPropertySchemaByVisualizer(v visualizer.Visualizer) *property.Schema { + for _, p := range pluginManifest.ExtensionSchema { + if p.ID().String() == "reearth/"+string(v) { + return p + } + } + return nil +} + +func MustPropertySchemaByVisualizer(v visualizer.Visualizer) *property.Schema { + ps := GetPropertySchemaByVisualizer(v) + if ps == nil { + panic("property schema not found: " + v) + } + return ps +} + +func GetPropertySchema(id property.SchemaID) *property.Schema { + for _, p := range pluginManifest.ExtensionSchema { + if id == p.ID() { + return p + } + } + return nil +} + +func Plugin() *plugin.Plugin { + return pluginManifest.Plugin +} + +func GetPlugin(id plugin.ID) *plugin.Plugin { + if id.Equal(pluginManifest.Plugin.ID()) { + return pluginManifest.Plugin + } + return nil +} diff --git a/server/pkg/builtin/main_test.go b/server/pkg/builtin/main_test.go new file mode 100644 index 000000000..acd98c551 --- /dev/null +++ b/server/pkg/builtin/main_test.go @@ -0,0 +1,109 @@ +package builtin + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/visualizer" + "github.com/stretchr/testify/assert" +) + +func TestGetPropertySchemaByVisualizer(t *testing.T) { + tests := []struct { + name string + visualizer visualizer.Visualizer + expectedNil bool + }{ + { + name: "cesium", + visualizer: visualizer.VisualizerCesium, + expectedNil: false, + }, + { + name: "unsupported visualizer", + visualizer: "foo", + expectedNil: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res := GetPropertySchemaByVisualizer(tc.visualizer) + if tc.expectedNil { + assert.Nil(t, res) + } else { + assert.NotNil(t, res) + } + }) + } +} + +func TestPlugin(t 
*testing.T) { + assert.NotNil(t, Plugin()) +} +func TestGetPlugin(t *testing.T) { + tests := []struct { + name string + pluginID plugin.ID + expectedNil bool + }{ + { + name: "Official Plugin", + pluginID: plugin.OfficialPluginID, + expectedNil: false, + }, + { + name: "foo plugin", + pluginID: plugin.MustID("foo~1.1.1"), + expectedNil: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res := GetPlugin(tc.pluginID) + if tc.expectedNil { + assert.Nil(t, res) + } else { + assert.NotNil(t, res) + } + }) + } +} + +func TestGetPropertySchema(t *testing.T) { + tests := []struct { + name string + psId property.SchemaID + expectedNil bool + }{ + { + name: "Infobox", + psId: PropertySchemaIDInfobox, + expectedNil: false, + }, + { + name: "unknown propertySchemaId", + psId: property.MustSchemaID("xxx~1.1.1/aa"), + expectedNil: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res := GetPropertySchema(tc.psId) + if tc.expectedNil { + assert.Nil(t, res) + } else { + assert.NotNil(t, res) + } + }) + } +} diff --git a/server/pkg/builtin/manifest.yml b/server/pkg/builtin/manifest.yml new file mode 100644 index 000000000..1cb30f101 --- /dev/null +++ b/server/pkg/builtin/manifest.yml @@ -0,0 +1,1932 @@ +id: reearth +system: true +name: Re:Earth Official Plugin +description: Official Plugin +author: Re:Earth +extensions: + - id: cesium + name: Cesium + description: Select here to find scene settings in the right panel. This includes setting map tiles, atmospheric conditions, real lighting, and more. + visualizer: cesium + type: visualizer + schema: + groups: + - id: default + title: Scene + fields: + - id: camera + type: camera + title: Initial camera position + description: The starting position of your project. + - id: terrain + type: bool + title: Terrain + description: Show elevation when close to the surface. 
+ - id: terrainType + type: string + title: Terrain type + description: Specify terrain type. + defaultValue: cesium + choices: + - key: cesium + label: Cesium World Terrain + - key: arcgis + label: ArcGIS Terrain + availableIf: + field: terrain + type: bool + value: true + - id: terrainExaggeration + type: number + title: Terrain exaggeration + description: A scalar used to exaggerate the terrain. Defaults to 1.0 (no exaggeration). A value of 2.0 scales the terrain by 2x. A value of 0.0 makes the terrain completely flat. + defaultValue: 1 + suffix: x + availableIf: + field: terrain + type: bool + value: true + - id: terrainExaggerationRelativeHeight + type: number + title: Terrain exaggeration relative height + description: The height from which terrain is exaggerated. Defaults to 0.0. Terrain that is above this height will scale upwards and terrain that is below this height will scale downwards. + defaultValue: 0 + suffix: m + availableIf: + field: terrain + type: bool + value: true + - id: depthTestAgainstTerrain + type: bool + title: Hide objects under terrain + description: Hides objects under the terrain. Depending on the loading status of the terrain, objects may be shown or hidden. + availableIf: + field: terrain + type: bool + value: true + - id: skybox + type: bool + title: Sky + defaultValue: true + description: Show the stars. + - id: bgcolor + type: string + title: Background color + description: With Sky disabled, choose a background color. + ui: color + - id: ion + type: string + title: Cesium Ion API access token + description: Cesium Ion account users may use their personal API keys to be able to use their Cesium Ion assets(tile data, 3D data, etc) with their project. + - id: sceneMode + type: string + title: Scene mode + description: Specify scene mode. 
+ defaultValue: 3d + choices: + - key: 3d + label: Scene 3D + - key: 2d + label: Scene 2D + - key: columbus + label: Columbus View + - id: cameraLimiter + title: Camera Limiter + description: Set the camera limiting box. + fields: + - id: cameraLimitterEnabled + type: bool + title: Enable + defaultValue: false + description: Enable camera limiter. + - id: cameraLimitterShowHelper + type: bool + title: Show helper + defaultValue: false + description: Display the limiter boundaries. + - id: cameraLimitterTargetArea + type: camera + title: Target max height + description: The base position of the camera movement range. This position is the center point of the limit box in the horizontal and depth directions, and is the maximum height of the movable range. The camera will not be able to zoom out beyond the height specified here. + - id: cameraLimitterTargetWidth + type: number + title: Target width + description: Specifies the width (longitude direction) of the box that represents the limiter boundaries. + min: 5 + defaultValue: 1000000 + suffix: m + - id: cameraLimitterTargetLength + type: number + title: Target length + description: Specifies the depth (latitude direction) of the box that represents the limiter boundaries. + min: 5 + defaultValue: 1000000 + suffix: m + - id: tiles + title: Tiles + description: You may change the look of the Earth by obtaining map tile data and setting it here. 
+ list: true + representativeField: tile_type + fields: + - id: tile_type + type: string + title: Tile type + defaultValue: default + choices: + - key: default + label: Default + - key: default_label + label: Labelled + - key: default_road + label: Road Map + - key: stamen_watercolor + label: Stamen Watercolor + - key: stamen_toner + label: Stamen Toner + - key: open_street_map + label: Open Street Map + - key: esri_world_topo + label: ESRI Topography + - key: black_marble + label: Earth at night + - key: japan_gsi_standard + label: Japan GSI Standard Map + - key: url + label: URL + - id: tile_url + type: string + title: Tile map URL + availableIf: + field: tile_type + type: string + value: url + - id: tile_minLevel + type: number + title: Minimum zoom level + min: 0 + max: 30 + - id: tile_maxLevel + type: number + title: Maximum zoom level + min: 0 + max: 30 + - id: tile_opacity + type: number + title: Opacity + description: "Change the opacity of the selected tile map. Min: 0 Max: 1" + defaultValue: 1 + ui: slider + min: 0 + max: 1 + - id: indicator + title: Indicator + description: Set the style of indicator shown when selecting a layer on the map. + fields: + - id: indicator_type + type: string + title: Type + defaultValue: default + description: Choose how the indicator will look. + choices: + - key: default + label: Default + - key: crosshair + label: Crosshair + - key: custom + label: Custom + - id: indicator_image + type: url + title: Image URL + ui: image + availableIf: + field: indicator_type + type: string + value: custom + - id: indicator_image_scale + type: number + title: Image scale + availableIf: + field: indicator_type + type: string + value: custom + - id: theme + title: Publish Theme + description: Set your theme. + fields: + - id: themeType + type: string + title: Theme + defaultValue: dark + description: Select the theme. 
+ choices: + - key: dark + label: Re:Earth Dark + - key: light + label: Re:Earth Light + - key: forest + label: Forest + - key: custom + label: Custom theme + - id: themeTextColor + type: string + ui: color + title: Text color + description: Select a color. + defaultValue: "#434343" + availableIf: + field: themeType + type: string + value: custom + - id: themeSelectColor + type: string + ui: color + title: Select color + description: Select a color. + defaultValue: "#C52C63" + availableIf: + field: themeType + type: string + value: custom + - id: themeBackgroundColor + type: string + ui: color + title: Background color + description: Select a color. + defaultValue: "#DFE5F0" + availableIf: + field: themeType + type: string + value: custom + - id: atmosphere + title: Atmospheric Conditions + description: Set the look and feel of the Earth. + fields: + - id: enable_sun + type: bool + title: Sun + defaultValue: true + description: Display the Sun. + - id: enable_lighting + type: bool + title: Lighting + defaultValue: false + description: Display natural lighting from the sun. + - id: ground_atmosphere + type: bool + title: Ground atmosphere + defaultValue: true + description: Display a lower atmospheric layer. + - id: sky_atmosphere + type: bool + title: Sky atmosphere + defaultValue: true + description: Display an upper atmospheric layer. + - id: shadows + type: bool + title: Shadow + description: Display shadows on the Earth. Shadows for each layers should be also enabled to see them. + - id: fog + type: bool + title: Fog + defaultValue: true + description: Display customizable fog. + - id: fog_density + type: number + title: Fog density + defaultValue: 2.0e-4 + description: "Set a thickness to the fog. Min: 0 Max: 1" + min: 0 + max: 1 + - id: brightness_shift + type: number + title: Fog brightness + defaultValue: 0.03 + description: "Set brightness of the fog. 
Min: -1 Max: 1" + min: -1 + max: 1 + - id: hue_shift + type: number + title: Fog hue + description: "Set hue of the fog. Min: -1 Max: 1" + min: -1 + max: 1 + - id: surturation_shift + type: number + title: Fog saturation + description: "Set saturation of the fog. Min: -1 Max: 1" + min: -1 + max: 1 + - id: timeline + title: Timeline + fields: + - id: animation + type: bool + title: Animation + defaultValue: false + description: Enables animation play. If enabled, each 3D models can animate. + - id: visible + type: bool + title: Timeline + description: Whether the timeline UI is displayed or not + - id: current + type: string + title: Current time + ui: datetime + - id: start + type: string + title: Start time + description: If nothing is set, it will be set automatically according to the data being displayed. + ui: datetime + - id: stop + type: string + title: Stop time + description: If nothing is set, it will be set automatically according to the data being displayed. + ui: datetime + - id: stepType + type: string + title: Tick type + defaultValue: rate + description: How to specify the playback speed + choices: + - key: rate + label: Rate + - key: fixed + label: Fixed + - id: multiplier + type: number + title: Multiplier + description: Specifies the playback speed as a multiple of the real time speed. Negative values can also be specified. Default is 1x. + defaultValue: 1 + prefix: x + availableIf: + field: stepType + type: string + value: rate + - id: step + type: number + title: Step + description: Specifies the playback speed in seconds. Each time the screen is repeatedly drawn, it advances by the specified specified number of seconds. Negative values can also be specified. The default is 1 second. + defaultValue: 1 + suffix: s + availableIf: + field: stepType + type: string + value: fixed + - id: rangeType + type: string + title: Range + description: Specifies the playback speed in seconds. Negative values can also be specified. 
+ defaultValue: unbounded + choices: + - key: unbounded + label: Unbounded + - key: clamped + label: Clamped + - key: bounced + label: Bounced + - id: googleAnalytics + title: Google Analytics + description: Set your Google Analytics tracking ID and analyze how your published project is being viewed. + fields: + - id: enableGA + type: bool + title: Enable + defaultValue: false + description: Enable Google Analytics + - id: trackingId + type: string + title: Tracking ID + description: Paste your Google Analytics tracking ID here. This will be embedded in your published project. + linkable: + url: + schemaGroupId: indicator + fieldId: indicator_image + - id: infobox + name: Infobox + type: infobox + description: Create an information area that appears when a layer is highlighted. Text, pictures, video, etc can be added to an infobox. + schema: + groups: + - id: default + title: Basic + fields: + - id: title + type: string + title: Title + - id: showTitle + type: bool + title: Show title + defaultValue: true + - id: position + type: string + title: Position + defaultValue: right + choices: + - key: right + label: Right + - key: middle + label: Middle + - key: left + label: Left + - id: size + type: string + title: Width + defaultValue: small + choices: + - key: small + label: Small + - key: medium + label: Medium + - key: large + label: Large + - id: heightType + type: string + title: Height type + defaultValue: auto + choices: + - key: auto + label: Auto + - key: manual + label: Manual + - id: height + type: number + title: Height + min: 284 + max: 2048 + suffix: px + description: "This sets the infobox height. Min: 284 Max: 2048" + availableIf: + field: heightType + type: string + value: manual + - id: infoboxPaddingTop + type: number + title: Top padding + min: 0 + max: 40 + suffix: px + description: "The space between the top of the infobox and the title. 
Min: 0 Max: 40" + - id: infoboxPaddingBottom + type: number + title: Bottom padding + min: 0 + max: 40 + suffix: px + description: "The space between the bottom of the infobox and the last block. Min: 0 Max: 40" + - id: infoboxPaddingLeft + type: number + title: Left padding + min: 0 + max: 40 + suffix: px + description: "The space between the left side of the infobox and the title and blocks. Min: 0 Max: 40" + - id: infoboxPaddingRight + type: number + title: Right padding + min: 0 + max: 40 + suffix: px + description: "The space between the right side of the infobox and the title and blocks. Min: 0 Max: 40" + - id: bgcolor + type: string + title: Background color + ui: color + - id: outlineWidth + type: number + title: Outline width + suffix: px + max: 20 + - id: outlineColor + type: string + title: Outline color + ui: color + - id: useMask + type: bool + title: Use mask + - id: typography + type: typography + title: Font + - id: marker + visualizer: cesium + type: primitive + name: Marker + description: A standard map marker. + schema: + groups: + - id: default + title: Marker + fields: + - id: location + type: latlng + title: Location + - id: height + type: number + title: Height + defaultValue: 0 + min: 0 + suffix: m + - id: heightReference + type: string + title: Height standard + description: The position relative to the terrain. By default, it is the absolute altitude from the WGS84 ellipsoid. 
+ defaultValue: none + choices: + - key: none + label: Absolute + - key: clamp + label: Clamp to ground + - key: relative + label: Relative to ground + - id: style + type: string + title: Style + defaultValue: image + choices: + - key: point + label: Point + - key: image + label: Icon + - id: pointColor + type: string + title: Point color + ui: color + availableIf: + field: style + type: string + value: point + - id: pointSize + type: number + title: Point size + defaultValue: 10 + min: 0 + suffix: px + availableIf: + field: style + type: string + value: point + - id: pointOutlineColor + type: string + title: Point outline color + ui: color + availableIf: + field: style + type: string + value: point + - id: pointOutlineWidth + type: number + title: Point outline width + availableIf: + field: style + type: string + value: point + - id: image + type: url + title: Image URL + ui: image + availableIf: + field: style + type: string + value: image + - id: imageSize + type: number + title: Image scale + defaultValue: 1 + min: 0 + availableIf: + field: style + type: string + value: image + - id: imageHorizontalOrigin + type: string + title: Image horizontal origin + defaultValue: center + choices: + - key: left + label: Left + - key: center + label: Center + - key: right + label: Right + availableIf: + field: style + type: string + value: image + - id: imageVerticalOrigin + type: string + title: Image vertical origin + defaultValue: center + choices: + - key: top + label: Top + - key: center + label: Center + - key: baseline + label: Baseline + - key: bottom + label: Bottom + availableIf: + field: style + type: string + value: image + - id: imageColor + type: string + title: Image color + ui: color + availableIf: + field: style + type: string + value: image + - id: imageCrop + type: string + title: Image crop + defaultValue: none + choices: + - key: none + label: None + # - key: rounded + # label: Rounded + - key: circle + label: Circle + availableIf: + field: style + 
type: string + value: image + - id: imageShadow + type: bool + title: Image shadow + availableIf: + field: style + type: string + value: image + - id: imageShadowColor + type: string + title: Shadow color + ui: color + availableIf: + field: style + type: string + value: image + - id: imageShadowBlur + type: number + title: Shadow radius + defaultValue: 3 + suffix: px + availableIf: + field: style + type: string + value: image + - id: imageShadowPositionX + type: number + title: Shadow X + suffix: px + defaultValue: 0 + availableIf: + field: style + type: string + value: image + - id: imageShadowPositionY + type: number + title: Shadow Y + suffix: px + defaultValue: 0 + availableIf: + field: style + type: string + value: image + - id: label + type: bool + title: Label + - id: labelText + type: string + title: Label text + availableIf: + field: label + type: bool + value: true + - id: labelPosition + type: string + title: Label position + defaultValue: right + choices: + - key: right + label: Right + - key: left + label: Left + - key: top + label: Top + - key: bottom + label: Bottom + - key: righttop + label: Right top + - key: rightbottom + label: Right bottom + - key: lefttop + label: Left top + - key: leftbottom + label: Left bottom + availableIf: + field: label + type: bool + value: true + - id: labelTypography + type: typography + title: Label font + availableIf: + field: label + type: bool + value: true + - id: labelBackground + type: bool + title: Label background + availableIf: + field: label + type: bool + value: true + - id: extrude + type: bool + title: Extruded + linkable: + latlng: + schemaGroupId: default + fieldId: location + url: + schemaGroupId: default + fieldId: image + - id: polyline + visualizer: cesium + type: primitive + name: Polyline + description: Polyline primitive + schema: + groups: + - id: default + title: Polyline + fields: + - id: coordinates + type: coordinates + title: Coordinates + - id: strokeColor + type: string + title: Stroke 
color + ui: color + - id: strokeWidth + type: number + title: Stroke width + min: 0 + defaultValue: 1 + suffix: px + - id: polygon + visualizer: cesium + type: primitive + name: Polygon + description: Polygon primitive + schema: + groups: + - id: default + title: Polygon + fields: + - id: polygon + type: polygon + title: Polygon + - id: fill + type: bool + title: Fill + defaultValue: true + - id: fillColor + type: string + title: Fill color + ui: color + availableIf: + field: fill + type: bool + value: true + - id: stroke + type: bool + title: Stroke + - id: strokeColor + type: string + title: Stroke color + ui: color + availableIf: + field: stroke + type: bool + value: true + - id: strokeWidth + type: number + title: Stroke width + min: 0 + defaultValue: 1 + suffix: px + availableIf: + field: stroke + type: bool + value: true + - id: rect + visualizer: cesium + type: primitive + name: Rectangle + description: Rectangle primitive + schema: + groups: + - id: default + title: Rectangle + fields: + - id: rect + type: rect + title: Rect + - id: height + type: number + title: Height + defaultValue: 0 + min: 0 + suffix: m + - id: extrudedHeight + type: number + title: Extruded height + min: 0 + - id: style + type: string + title: Style + defaultValue: color + choices: + - key: color + label: Color + - key: image + label: Image + - id: fillColor + type: string + title: Fill + ui: color + availableIf: + field: style + type: string + value: color + - id: image + type: url + title: Image URL + ui: image + availableIf: + field: style + type: string + value: image + - id: photooverlay + visualizer: cesium + type: primitive + name: Photo overlay + description: An Icon marker that allows you to set a photo that will appear after reaching its location. 
+ schema: + groups: + - id: default + title: Photo overlay + fields: + - id: location + type: latlng + title: Location + - id: height + type: number + title: Height + - id: heightReference + type: string + title: Height standard + description: The position relative to the terrain. By default, it is the absolute altitude from the WGS84 ellipsoid. + defaultValue: none + choices: + - key: none + label: Absolute + - key: clamp + label: Clamp to ground + - key: relative + label: Relative to ground + - id: camera + type: camera + title: Camera + description: Set the camera position for the overlay. + - id: image + type: url + ui: image + title: Icon + - id: imageSize + type: number + title: Icon size + prefix: x + defaultValue: 1 + - id: imageHorizontalOrigin + type: string + title: Image horizontal origin + defaultValue: center + choices: + - key: left + label: Left + - key: center + label: Center + - key: right + label: Right + - id: imageVerticalOrigin + type: string + title: Image vertical origin + defaultValue: center + choices: + - key: top + label: Top + - key: center + label: Center + - key: baseline + label: Baseline + - key: bottom + label: Bottom + - id: imageCrop + type: string + title: Icon crop + defaultValue: none + choices: + - key: none + label: None + # - key: rounded + # label: Rounded + - key: circle + label: Circle + - id: imageShadow + type: bool + title: Icon shadow + - id: imageShadowColor + type: string + title: Shadow color + ui: color + availableIf: + field: imageShadow + type: bool + value: true + - id: imageShadowBlur + type: number + title: Shadow radius + defaultValue: 3 + suffix: px + availableIf: + field: imageShadow + type: bool + value: true + - id: imageShadowPositionX + type: number + title: Shadow X + suffix: px + defaultValue: 0 + availableIf: + field: imageShadow + type: bool + value: true + - id: imageShadowPositionY + type: number + title: Shadow Y + suffix: px + defaultValue: 0 + availableIf: + field: imageShadow + type: bool + 
value: true + - id: photoOverlayImage + type: url + title: Photo + ui: image + - id: photoOverlayDescription + type: string + ui: multiline + title: Photo description + linkable: + latlng: + schemaGroupId: default + fieldId: location + # - id: rect + # visualizer: cesium + # type: primitive + # name: Rectangle + # description: A rectangle + # schema: + # groups: + # - id: default + # title: Rectangle + # fields: + # - id: rect + # type: rect + # title: Rectangle + # - id: height + # type: number + # title: Height + # defaultValue: 0 + # min: 0 + # suffix: m + # - id: heightReference + # type: string + # title: Height standard + # description: The position relative to the terrain. By default, it is the absolute altitude from the WGS84 ellipsoid. + # defaultValue: none + # choices: + # - key: none + # label: Absolute + # - key: clamp + # label: Clamp to ground + # - key: relative + # label: Relative to ground + # - id: extrudedHeight + # type: bool + # title: Extruded + # - id: style + # type: string + # title: Style + # defaultValue: color + # choices: + # - key: color + # label: Color + # - key: image + # label: Image + # - id: fillColor + # type: string + # title: Fill + # ui: color + # availableIf: + # field: style + # type: string + # value: color + # - id: image + # type: url + # title: Image + # ui: image + # availableIf: + # field: style + # type: string + # value: image + # - id: outlineColor + # type: string + # title: Fill + # ui: color + # - id: outlineWidth + # type: number + # title: Outline width + # suffix: px + # - id: shadows + # type: string + # title: Shadows + # description: Enables shadows. Also shadows in the scene should be enabled. Rendering may become overloaded. 
+ # defaultValue: disabled + # choices: + # - key: disabled + # label: Disabled + # - key: enabled + # label: Enabled + # - key: cast_only + # label: Cast only + # - key: receive_only + # label: Receive only + - id: ellipsoid + visualizer: cesium + type: primitive + name: Sphere + description: A 3D ellipsoid + schema: + groups: + - id: default + title: Ellipsoid + fields: + - id: position + type: latlng + title: Position + - id: height + type: number + title: Height + defaultValue: 0 + min: 0 + suffix: m + - id: heightReference + type: string + title: Height standard + description: The position relative to the terrain. By default, it is the absolute altitude from the WGS84 ellipsoid. + defaultValue: none + choices: + - key: none + label: Absolute + - key: clamp + label: Clamp to ground + - key: relative + label: Relative to ground + - id: radius + type: number + title: Radius + defaultValue: 1000 + min: 0 + suffix: m + - id: fillColor + type: string + title: Fill + ui: color + - id: shadows + type: string + title: Shadows + description: Enables shadows. Also shadows in the scene should be enabled. Rendering may become overloaded. + defaultValue: disabled + choices: + - key: disabled + label: Disabled + - key: enabled + label: Enabled + - key: cast_only + label: Cast only + - key: receive_only + label: Receive only + linkable: + latlng: + schemaGroupId: default + fieldId: position + - id: model + visualizer: cesium + type: primitive + name: 3D Model + description: A 3D model + schema: + groups: + - id: default + title: 3D model + fields: + - id: model + type: url + title: URL + description: only glTF format is supported + - id: location + type: latlng + title: Location + - id: height + type: number + title: Height + defaultValue: 0 + min: 0 + suffix: m + - id: heightReference + type: string + title: Height standard + description: The position relative to the terrain. By default, it is the absolute altitude from the WGS84 ellipsoid. 
+ defaultValue: none + choices: + - key: none + label: Absolute + - key: clamp + label: Clamp to ground + - key: relative + label: Relative to ground + - id: heading + type: number + title: Heading + suffix: ยฐ + defaultValue: 0 + - id: pitch + type: number + title: Pitch + suffix: ยฐ + defaultValue: 0 + - id: roll + type: number + title: Roll + suffix: ยฐ + defaultValue: 0 + - id: scale + type: number + title: Scale + description: Displays the 3D model equally multiplied by the set value. + prefix: x + defaultValue: 1 + - id: maximumScale + type: number + title: Max scale + prefix: x + - id: minimumPixelSize + type: number + title: Min size + suffix: px + - id: animation + type: bool + title: Animation + description: Enables aniamtion. The scene animation should also be enabled. + defaultValue: true + - id: appearance + title: Appearance + fields: + - id: shadows + type: string + title: Shadows + description: Enables shadows. Also shadows in the scene should be enabled. Rendering may become overloaded. 
+ defaultValue: disabled + choices: + - key: disabled + label: Disabled + - key: enabled + label: Enabled + - key: cast_only + label: Cast only + - key: receive_only + label: Receive only + - id: colorBlend + type: string + title: Color blend + defaultValue: none + choices: + - key: none + label: None + - key: highlight + label: Highlight + - key: replace + label: Replace + - key: mix + label: Mix + - id: color + type: string + title: Color + ui: color + - id: colorBlendAmount + type: number + title: Color blend amount + description: O + min: 0 + max: 1 + defaultValue: 1 + availableIf: + field: colorBlend + type: string + value: mix + - id: lightColor + type: string + title: Light color + ui: color + - id: silhouette + type: bool + title: Silhouette + - id: silhouetteColor + type: string + title: Silhouette color + ui: color + availableIf: + field: silhouette + type: bool + value: true + - id: silhouetteSize + type: number + title: Silhouette size + defaultValue: 1 + suffix: px + availableIf: + field: silhouette + type: bool + value: true + linkable: + latlng: + schemaGroupId: default + fieldId: location + - id: tileset + visualizer: cesium + type: primitive + name: 3D Tiles + description: 3D tiles in "3D Tiles" format + schema: + groups: + - id: default + title: Model + fields: + - id: tileset + type: url + title: Tileset URL + description: A path to tileset.json in 3D tiles + - id: styleUrl + type: url + title: Styling URL + description: Optional. A path to a JSON file in 3D Tiles styles + - id: shadows + type: string + title: Shadows + description: Enables shadows. Also shadows in the scene should be enabled. Rendering may become overloaded. 
+ defaultValue: disabled + choices: + - key: disabled + label: Disabled + - key: enabled + label: Enabled + - key: cast_only + label: Cast only + - key: receive_only + label: Receive only + - id: resource + visualizer: cesium + type: primitive + name: File + description: Import your own primitives to be used instead of Re:Earth's built in ones. + schema: + groups: + - id: default + title: File + fields: + - id: url + type: url + title: File URL + ui: file + - id: type + type: string + title: File format + defaultValue: auto + choices: + - key: auto + label: Auto + - key: kml + label: KML + - key: geojson + label: GeoJSON / TopoJSON + - key: czml + label: CZML + - id: clampToGround + type: bool + title: Clamp to ground + description: Option to attach the primitive to the ground (keeps primitive on top of the map even with terrain enabled). + defaultValue: false + - id: textblock + type: block + name: Text + description: Text block + schema: + groups: + - id: default + title: Text block + fields: + - id: title + type: string + title: Title + - id: text + type: string + title: Content + ui: multiline + - id: markdown + type: bool + title: Use markdown + - id: paddingTop + type: number + title: Top padding + min: 0 + max: 40 + suffix: px + description: "The space between the top edge of the text block and the uppermost text. Min: 0 Max: 40" + - id: paddingBottom + type: number + title: Bottom padding + min: 0 + max: 40 + suffix: px + description: "The space between the bottom edge of the text block and the last text. Min: 0 Max: 40" + - id: paddingLeft + type: number + title: Left padding + min: 0 + max: 40 + suffix: px + description: "The space between the left edge of the text block and the text. Min: 0 Max: 40" + - id: paddingRight + type: number + title: Right padding + min: 0 + max: 40 + suffix: px + description: "The space between the right edge of the text block and the text. 
Min: 0 Max: 40" + - id: typography + type: typography + title: Font + - id: imageblock + type: block + name: Image + description: Image block + schema: + groups: + - id: default + title: Image block + fields: + - id: image + type: url + title: Image + ui: image + - id: title + type: string + title: Title + - id: fullSize + type: bool + title: Full size + - id: imageSize + type: string + title: Image size + defaultValue: cover + choices: + - key: cover + label: Cover + - key: contain + label: Contain + - id: imagePositionX + type: string + title: Image horizontal position + defaultValue: center + choices: + - key: left + label: Left + - key: center + label: Center + - key: right + label: Right + - id: imagePositionY + type: string + title: Image vertical position + defaultValue: center + choices: + - key: top + label: Top + - key: center + label: Center + - key: bottom + label: Bottom + linkable: + url: + schemaGroupId: default + fieldId: image + - id: videoblock + type: block + name: Video + description: Video block + schema: + groups: + - id: default + title: Video block + fields: + - id: url + type: url + title: Video URL + ui: video + - id: title + type: string + title: Title + - id: fullSize + type: bool + title: Full size + - id: locationblock + type: block + name: Location + description: Location block + schema: + groups: + - id: default + title: Location block + fields: + - id: location + type: latlng + title: Location + - id: title + type: string + title: Title + - id: fullSize + type: bool + title: Full size + - id: dlblock + type: block + name: Table + description: Table block + schema: + groups: + - id: default + title: Table block + fields: + - id: title + type: string + title: Title + - id: typography + type: typography + title: Font + - id: items + list: true + title: Items + fields: + - id: item_title + type: string + title: Title + - id: item_datatype + type: string + title: Type + defaultValue: string + choices: + - key: string + label: String + - 
key: number + label: Number + - id: item_datastr + type: string + title: Data + availableIf: + field: item_datatype + type: string + value: string + - id: item_datanum + type: number + title: Data + availableIf: + field: item_datatype + type: string + value: number + # - id: navigator + # type: widget + # title: Navigator + # description: Navigator widget + - id: menu + type: widget + name: Menu (legacy) + description: Menu widgets + singleOnly: true + widgetLayout: + floating: true + schema: + groups: + - id: buttons + title: Buttons + list: true + fields: + - id: buttonInvisible + type: bool + title: Hide + - id: buttonTitle + type: string + title: Title + - id: buttonPosition + type: string + title: Position + defaultValue: topleft + choices: + - key: topleft + label: Top-Left + - key: topright + label: Top-Right + - key: bottomleft + label: Bottom-Left + - key: bottomright + label: Bottom-Right + - id: buttonStyle + type: string + title: Style + defaultValue: text + choices: + - key: text + label: Text only + - key: icon + label: Icon only + - key: texticon + label: Text and icon + - id: buttonIcon + type: url + title: Icon + ui: image + - id: buttonColor + type: string + title: Text color + ui: color + - id: buttonBgcolor + type: string + title: Background color + ui: color + - id: buttonType + type: string + title: Type + defaultValue: link + choices: + - key: link + label: Link + - key: menu + label: Menu + - key: camera + label: Camera flight + - id: buttonLink + type: url + title: Link + availableIf: + field: buttonType + type: string + value: link + - id: buttonCamera + type: camera + title: Camera flight + availableIf: + field: buttonType + type: string + value: camera + - id: menu + title: Menu + list: true + fields: + - id: menuTitle + type: string + title: Title + - id: menuIcon + type: url + title: Icon + - id: menuType + type: string + title: Type + defaultValue: link + choices: + - key: link + label: Link + - key: camera + label: Camera + - key: 
border + label: Break + - id: menuLink + type: url + title: Link + availableIf: + field: menuType + type: string + value: link + - id: menuCamera + type: camera + title: Camera + availableIf: + field: menuType + type: string + value: camera + - id: button + type: widget + name: Button + description: Button widget + widgetLayout: + defaultLocation: + zone: outer + section: left + area: top + schema: + groups: + - id: default + title: Button + fields: + - id: buttonTitle + type: string + title: Title + - id: buttonStyle + type: string + title: Style + defaultValue: text + choices: + - key: text + label: Text only + - key: icon + label: Icon only + - key: texticon + label: Text and icon + - id: buttonIcon + type: url + title: Icon + ui: image + - id: buttonColor + type: string + title: Text color + ui: color + - id: buttonBgcolor + type: string + title: Background color + ui: color + - id: buttonType + type: string + title: Type + defaultValue: link + choices: + - key: link + label: Link + - key: menu + label: Menu + - key: camera + label: Camera flight + - id: buttonLink + type: url + title: Link + availableIf: + field: buttonType + type: string + value: link + - id: buttonCamera + type: camera + title: Camera flight + availableIf: + field: buttonType + type: string + value: camera + - id: menu + title: Menu + list: true + availableIf: + field: buttonType + type: string + value: menu + fields: + - id: menuTitle + type: string + title: Title + - id: menuIcon + type: url + title: Icon + - id: menuType + type: string + title: Type + defaultValue: link + choices: + - key: link + label: Link + - key: camera + label: Camera + - key: border + label: Break + - id: menuLink + type: url + title: Link + availableIf: + field: menuType + type: string + value: link + - id: menuCamera + type: camera + title: Camera + availableIf: + field: menuType + type: string + value: camera + - id: splashscreen + type: widget + name: Splash screen + description: A unique start screen that will 
display on load of your archive(ex. display the archive's title). + singleOnly: true + widgetLayout: + floating: true + schema: + groups: + - id: overlay + title: Overlay screen + fields: + - id: overlayEnabled + type: bool + title: Enabled + - id: overlayDelay + type: number + title: Delay + suffix: s + min: 0 + - id: overlayDuration + type: number + title: Duration + suffix: s + min: 0 + - id: overlayTransitionDuration + type: number + title: Fade out + suffix: s + min: 0 + - id: overlayImage + type: url + title: Image + ui: image + - id: overlayImageW + type: number + title: Image width + suffix: px + - id: overlayImageH + type: number + title: Image height + suffix: px + - id: overlayBgcolor + type: string + title: Background color + ui: color + - id: camera + title: Camera flight sequence + list: true + fields: + - id: cameraPosition + type: camera + title: Camera position + - id: cameraDuration + type: number + title: Duration + suffix: s + min: 0 + - id: cameraDelay + type: number + title: Delay + suffix: s + min: 0 + - id: storytelling + type: widget + name: Storytelling + description: SA feature that enables you to create a story. Connect points in a meaningful way so that your information can be consumed and understood easily. 
+ singleOnly: true + widgetLayout: + extendable: + horizontally: true + defaultLocation: + zone: outer + section: left + area: bottom + schema: + groups: + - id: default + title: Storytelling + fields: + - id: duration + title: Duration + type: number + suffix: s + defaultValue: 3 + - id: range + title: Range + type: number + suffix: m + defaultValue: 50000 + - id: camera + title: Camera pose + type: camera + ui: camera_pose + - id: autoStart + title: Auto start + type: bool + - id: stories + title: Stories + representativeField: layer + list: true + fields: + - id: layer + title: Layer + type: ref + ui: layer + - id: layerDuration + title: Duration + type: number + suffix: s + - id: layerRange + title: Range + type: number + suffix: m + - id: layerCamera + title: Camera position + type: camera + - id: cluster + name: Cluster + type: cluster + description: Defines how layers are clustered together and displayed on earth. + schema: + groups: + - id: default + title: Cluster + fields: + - id: clusterPixelRange + type: number + description: Sets the minimum range between layers to get clustered together. + title: Pixel range + defaultValue: 15 + min: 1 + max: 200 + suffix: px + - id: clusterMinSize + type: number + title: Minimum cluster size + description: The minimum number of layers that can be clustered. + defaultValue: 3 + min: 2 + max: 20 + - id: clusterImage + type: url + title: Image + description: Sets the image of cluster entity. + ui: image + - id: clusterImageWidth + type: number + title: Width + suffix: px + description: Sets the image width. + - id: clusterImageHeight + type: number + title: Height + suffix: px + description: Sets the image height. 
+ - id: clusterLabelTypography + type: typography + title: Font + - id: layers + title: Layers + representativeField: layer + list: true + fields: + - id: layer + title: Layer + type: ref + ui: layer + - id: timeline + type: widget + name: Timeline + description: A timeline widget that allows for viewing layers and data at specific points in time. + singleOnly: true + widgetLayout: + extendable: + horizontally: true + defaultLocation: + zone: outer + section: center + area: bottom diff --git a/server/pkg/builtin/manifest_ja.yml b/server/pkg/builtin/manifest_ja.yml new file mode 100644 index 000000000..56694679e --- /dev/null +++ b/server/pkg/builtin/manifest_ja.yml @@ -0,0 +1,909 @@ +name: Re:Earthๅ…ฌๅผใƒ—ใƒฉใ‚ฐใ‚คใƒณ +description: ๅ…ฌๅผใƒ—ใƒฉใ‚ฐใ‚คใƒณ +extensions: + cesium: + name: Cesium + description: ๅณใƒ‘ใƒใƒซใงใ‚ทใƒผใƒณๅ…จไฝ“ใฎ่จญๅฎšใ‚’ๅค‰ๆ›ดใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใ‚ฟใ‚คใƒซใ€ๅคงๆฐ—ใ€ใƒฉใ‚คใƒ†ใ‚ฃใƒณใ‚ฐใชใฉใฎ่จญๅฎšใŒๅซใพใ‚Œใฆใ„ใพใ™ใ€‚ + propertySchema: + default: + title: ใ‚ทใƒผใƒณ + fields: + camera: + title: ใ‚ซใƒกใƒฉๅˆๆœŸไฝ็ฝฎ + description: ใƒšใƒผใ‚ธใƒญใƒผใƒ‰ๅพŒๆœ€ๅˆใซ่กจ็คบใ™ใ‚‹ใ‚ซใƒกใƒฉใฎไฝ็ฝฎใ‚’่จญๅฎšใ—ใพใ™ใ€‚ + terrain: + title: ๅœฐๅฝข + description: ๆœ‰ๅŠนใซใ™ใ‚‹ใจใ€ๆจ™้ซ˜ใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใฟใ“ใพใ‚Œใ€็ซ‹ไฝ“็š„ใชๅœฐๅฝขใ‚’่กจ็พใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + terrainType: + title: ๅœฐๅฝขใฎ็จฎ้กž + description: ๅœฐๅฝขใฎ็จฎ้กžใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + terrainExaggeration: + title: ๅœฐๅฝขใฎๅผท่ชฟ + description: ๅœฐๅฝขใฎๅผท่ชฟใ‚’่จญๅฎšใ—ใพใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใฏ1.0๏ผˆ่ช‡ๅผตใ—ใชใ„๏ผ‰ใงใ™ใ€‚2.0ใฏใ€ๅœฐๅฝขใ‚’2ๅ€ใซๆ‹กๅคงใ—ใพใ™ใ€‚0.0ใฎๅ€คใฏๅœฐๅฝขใ‚’ๅฎŒๅ…จใซๅนณใ‚‰ใซใ—ใพใ™ใ€‚ + terrainExaggerationRelativeHeight: + title: ๅœฐๅฝขใฎๅผท่ชฟใฎๅŸบๆบ– + description: ๅœฐๅฝขใŒ่ช‡ๅผตใ•ใ‚Œใ‚‹้ซ˜ใ•ใงใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใฏ0.0ใงใ™ใ€‚ใ“ใฎ้ซ˜ใ•ใ‚ˆใ‚ŠไธŠใซใ‚ใ‚‹ๅœฐๅฝขใฏไธŠๆ–นใซใ€ใ“ใฎ้ซ˜ใ•ใ‚ˆใ‚Šไธ‹ใซใ‚ใ‚‹ๅœฐๅฝขใฏไธ‹ๆ–นใซใ‚นใ‚ฑใƒผใƒซใ•ใ‚Œใพใ™ใ€‚ + depthTestAgainstTerrain: + title: 
ๅœฐๅฝขใฎไธ‹ใ‚’้ž่กจ็คบ + description: ๅœฐๅฝขใฎไธ‹ใซใ‚ใ‚‹ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’้ž่กจ็คบใซใ—ใพใ™ใ€‚ๆจ™้ซ˜ใƒ‡ใƒผใ‚ฟใฎ่ชญใฟ่พผใฟ็Šถๆณใซใ‚ˆใฃใฆใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใŒ่กจ็คบใ•ใ‚ŒใŸใ‚Š้š ใ‚ŒใŸใ‚Šใ™ใ‚‹ใ“ใจใŒใ‚ใ‚Šใพใ™ใ€‚ + skybox: + title: ๅฎ‡ๅฎ™ใฎ่กจ็คบ + description: ๅฎ‡ๅฎ™็ฉบ้–“ใฎ่กจ็คบใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚ + bgcolor: + title: ่ƒŒๆ™ฏ่‰ฒ + description: ๅฎ‡ๅฎ™็ฉบ้–“ใŒ้ž่กจ็คบใฎๅ ดๅˆใฎใ€่ƒŒๆ™ฏ่‰ฒใ‚’่จญๅฎšใ—ใพใ™ใ€‚ + ion: + title: Cesium Icon APIใ‚ขใ‚ฏใ‚ปใ‚นใƒˆใƒผใ‚ฏใƒณ + description: ่‡ช่บซใฎCesium Ionใ‚ขใ‚ซใ‚ฆใƒณใƒˆใ‹ใ‚‰APIใ‚ญใƒผใ‚’็™บ่กŒใ—ใ€ใ“ใ“ใซ่จญๅฎšใ—ใพใ™ใ€‚Cesium Ionใฎใ‚ขใ‚ปใƒƒใƒˆ๏ผˆใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟใ€3Dใƒ‡ใƒผใ‚ฟใชใฉ๏ผ‰ใฎไฝฟ็”จใŒๅฏ่ƒฝใซใชใ‚‹ใŸใ‚ใ€่จญๅฎšใ‚’ๆŽจๅฅจใ—ใพใ™ใ€‚ + sceneMode: + title: ใ‚ทใƒผใƒณใƒขใƒผใƒ‰ + description: ๅœฐ็ƒๅ„€ใฎ่กจ็คบใ‚’2Dใ€2.5Dใ€3Dใ‹ใ‚‰้ธๆŠžใ—ใพใ™ใ€‚ + cameraLimiter: + title: ใ‚ซใƒกใƒฉ็ฏ„ๅ›ฒๅˆถ้™ + description: ใ‚ซใƒกใƒฉใฎ็งปๅ‹•ใงใใ‚‹็ฏ„ๅ›ฒใ‚’ๅˆถ้™ใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + fields: + cameraLimitterEnabled: + title: ๆœ‰ๅŠน + description: ใ‚ซใƒกใƒฉใฎ็ฏ„ๅ›ฒๅˆถ้™ใ‚’ๆœ‰ๅŠนใซใ—ใพใ™ใ€‚ + cameraLimitterShowHelper: + title: ็ฏ„ๅ›ฒใ‚’่กจ็คบ + description: ใ‚ซใƒกใƒฉใฎ็งปๅ‹•็ฏ„ๅ›ฒใ‚’่กจใ™ใƒœใƒƒใ‚ฏใ‚นใ‚’่กจ็คบใ—ใพใ™ใ€‚ + cameraLimitterTargetArea: + title: ๅŸบๆบ–ไฝ็ฝฎ + description: ็งปๅ‹•็ฏ„ๅ›ฒใฎๅŸบๆบ–ใจใชใ‚‹ไฝ็ฝฎใงใ™ใ€‚ใ“ใฎไฝ็ฝฎใŒๅˆถ้™็ฏ„ๅ›ฒใ‚’่กจใ™ใƒœใƒƒใ‚ฏใ‚นใฎๆจชๆ–นๅ‘ใจๅฅฅ่กŒใๆ–นๅ‘ใฎไธญๅฟƒ็‚นใจใชใ‚Šใ€ใ‹ใค็งปๅ‹•ๅฏ่ƒฝ็ฏ„ๅ›ฒใซใŠใ‘ใ‚‹ๆœ€ๅคงใฎ้ซ˜ใ•ใจใชใ‚Šใพใ™ใ€‚ใ“ใ“ใงๆŒ‡ๅฎšใ—ใŸ้ซ˜ใ•ไปฅไธŠใซใ‚ซใƒกใƒฉใ‚’ใ‚บใƒผใƒ ใ‚ขใ‚ฆใƒˆใ™ใ‚‹ใ“ใจใŒใงใใชใใชใ‚Šใพใ™ใ€‚ + cameraLimitterTargetWidth: + title: ่ปฝๅบฆใฎ็ฏ„ๅ›ฒ + description: ๅˆถ้™็ฏ„ๅ›ฒใ‚’่กจใ™ใƒœใƒƒใ‚ฏใ‚นใฎๅน…๏ผˆ็ตŒๅบฆใฎๆ–นๅ‘๏ผ‰ใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + cameraLimitterTargetLength: + title: ็ทฏๅบฆใฎ็ฏ„ๅ›ฒ + description: ๅˆถ้™็ฏ„ๅ›ฒใ‚’่กจใ™ใƒœใƒƒใ‚ฏใ‚นใฎๅฅฅ่กŒใ๏ผˆ็ทฏๅบฆใฎๆ–นๅ‘๏ผ‰ใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + tiles: + title: ใ‚ฟใ‚คใƒซ + description: 
ๆ‰‹ๆŒใกใฎใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟใ‚’ไฝฟ็”จใ—ใ€ๅœฐ็ƒไธŠใซ่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + fields: + tile_title: + title: ๅๅ‰ + tile_type: + title: ็จฎ้กž + choices: + default: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + default_label: ใƒฉใƒ™ใƒซไป˜ใๅœฐๅ›ณ + default_road: ้“่ทฏๅœฐๅ›ณ + stamen_watercolor: Stamen Watercolor + stamen_toner: Stamen Toner + open_street_map: Open Street Map + esri_world_topo: ESRI Topography + black_marble: Black Marble + japan_gsi_standard: ๅœฐ็†้™ขๅœฐๅ›ณ ๆจ™ๆบ–ๅœฐๅ›ณ + url: URL + tile_url: + title: URL + tile_minLevel: + title: ๆœ€ๅฐใƒฌใƒ™ใƒซ + tile_maxLevel: + title: ๆœ€ๅคงใƒฌใƒ™ใƒซ + tile_opacity: + title: ไธ้€ๆ˜Žๆ€ง + description: NEEDS DESCRIPTION + indicator: + title: ใ‚คใƒณใƒ‡ใ‚ฃใ‚ฑใƒผใ‚ฟใƒผ + description: ใƒฌใ‚คใƒคใƒผ้ธๆŠžๆ™‚ใซ่กจ็คบใ•ใ‚Œใ‚‹ใ‚คใƒณใƒ‡ใ‚ฃใ‚ฑใƒผใ‚ฟใƒผใฎใ‚นใ‚ฟใ‚คใƒซใ‚’่จญๅฎšใ—ใพใ™ + fields: + indicator_type: + title: ใ‚ฟใ‚คใƒ— + description: ใ‚คใƒณใƒ‡ใ‚ฃใ‚ฑใƒผใ‚ฟใƒผใฎ่ฆ‹ใŸ็›ฎใ‚’่จญๅฎšใ—ใพใ™ + choices: + default: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + crosshair: ๅๅญ—็ทš + custom: ใ‚ซใ‚นใ‚ฟใƒ  + indicator_image: + title: ็”ปๅƒURL + indicator_image_scale: + title: ็”ปๅƒใ‚ตใ‚คใ‚บ + atmosphere: + title: ๅคงๆฐ— + description: ๅœฐ็ƒใ‚’่ฆ†ใ†ๅคงๆฐ—ๅœใฎ่จญๅฎšใŒใงใใพใ™ใ€‚ + fields: + enable_sun: + title: ๅคช้™ฝ + description: ๅฎ‡ๅฎ™็ฉบ้–“ใซๅญ˜ๅœจใ™ใ‚‹ๅคช้™ฝใฎ่กจ็คบใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚ + enable_lighting: + title: ๅคช้™ฝๅ…‰ + description: ๅคช้™ฝๅ…‰ใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใ‚‹ใ“ใจใงใ€ๆ˜ผๅคœใ‚’่กจ็พใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + ground_atmosphere: + title: ๅœฐ่กจใฎๅคงๆฐ— + description: ๅœฐ่กจใฎๅคงๆฐ—ๅœใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚ + sky_atmosphere: + title: ไธŠ็ฉบใฎๅคงๆฐ— + description: ๅœฐ็ƒใ‚’่ฆ†ใ†ๅคงๆฐ—ๅœใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚ + shadows: + title: ๅฝฑ + description: ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใŒ่ฝใจใ™ๅฝฑใ‚’่กจ็คบใ—ใพใ™ใ€‚ไฝตใ›ใฆๅ„ใƒฌใ‚คใƒคใƒผใฎๅฝฑใฎ่จญๅฎšใ‚’ใใ‚Œใžใ‚Œๆœ‰ๅŠนใซใ™ใ‚‹ๅฟ…่ฆใŒใ‚ใ‚Šใพใ™ใ€‚ + fog: + title: ้œง + description: ้œงใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚ + fog_density: + 
title: ๆฟƒๅบฆ
+            description: ้œงใฎๆฟƒๅบฆใ‚’0ไปฅไธŠใ‹ใ‚‰่จญๅฎšใ—ใพใ™ใ€‚
+          brightness_shift:
+            title: ๆ˜Žๅบฆ
+          hue_shift:
+            title: ่‰ฒ็›ธ
+          surturation_shift:
+            title: ๅฝฉๅบฆ
+      timeline:
+        title: ใ‚ฟใ‚คใƒ ใƒฉใ‚คใƒณ
+        fields:
+          animation:
+            title: ใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณ
+            description: ใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณใฎๅ†็”Ÿใ‚’ๆœ‰ๅŠนใซใ—ใพใ™ใ€‚3Dใƒขใƒ‡ใƒซใฎใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณใŒๅ†็”Ÿใ•ใ‚Œใ‚‹ใ‚ˆใ†ใซใชใ‚Šใพใ™ใ€‚
+          visible:
+            title: ใ‚ฟใ‚คใƒ ใƒฉใ‚คใƒณ
+            description: ใ‚ฟใ‚คใƒ ใƒฉใ‚คใƒณใฎUIใ‚’่กจ็คบใ—ใพใ™ใ€‚
+          current:
+            title: ็พๅœจๆ™‚ๅˆป
+          start:
+            title: ้–‹ๅง‹ๆ™‚ๅˆป
+            description: ไฝ•ใ‚‚่จญๅฎšใ•ใ‚Œใฆใ„ใชใ„ๅ ดๅˆใฏ่กจ็คบไธญใฎใƒ‡ใƒผใ‚ฟใซใ‚ˆใฃใฆ่‡ชๅ‹•็š„ใซ่จญๅฎšใ•ใ‚Œใพใ™ใ€‚
+          stop:
+            title: ็ต‚ไบ†ๆ™‚ๅˆป
+            description: ไฝ•ใ‚‚่จญๅฎšใ•ใ‚Œใฆใ„ใชใ„ๅ ดๅˆใฏ่กจ็คบไธญใฎใƒ‡ใƒผใ‚ฟใซใ‚ˆใฃใฆ่‡ชๅ‹•็š„ใซ่จญๅฎšใ•ใ‚Œใพใ™ใ€‚
+          stepType:
+            title: ๅ†็”Ÿ้€Ÿๅบฆใฎ็จฎ้กž
+            description: ๅ†็”Ÿ้€ŸๅบฆใฎๆŒ‡ๅฎšๆ–นๆณ•ใงใ™ใ€‚
+            choices:
+              rate: ๅ€็Ž‡
+              fixed: ๅ›บๅฎš
+          multiplier:
+            title: ๅ†็”Ÿ้€Ÿๅบฆ
+            description: ๅ†็”Ÿ้€Ÿๅบฆใ‚’็พๅฎŸใฎๆ™‚้–“ใฎ้€Ÿใ•ใซๅฏพใ™ใ‚‹ๅ€็Ž‡ใงๆŒ‡ๅฎšใ—ใพใ™ใ€‚ใƒžใ‚คใƒŠใ‚นๅ€คใ‚‚ๆŒ‡ๅฎšๅฏ่ƒฝใงใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใฏ1ๅ€ใงใ™ใ€‚
+          step:
+            title: ๅ†็”Ÿ้€Ÿๅบฆ
+            description: ๅ†็”Ÿ้€Ÿๅบฆใ‚’็ง’ใงๆŒ‡ๅฎšใ—ใพใ™ใ€‚็”ป้ขใฎๆ็”ปใŒ็นฐใ‚Š่ฟ”ใ•ใ‚Œใ‚‹ๅบฆใซๆŒ‡ๅฎšใ—ใŸ็ง’ๆ•ฐๅˆ†้€ฒใฟใพใ™ใ€‚ใƒžใ‚คใƒŠใ‚นๅ€คใ‚‚ๆŒ‡ๅฎšๅฏ่ƒฝใงใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใฏ1็ง’ใงใ™ใ€‚
+            suffix: ็ง’
+          rangeType:
+            title: ๅ†็”Ÿ็ต‚ไบ†ๆ™‚
+            description: ็พๅœจๆ™‚ๅˆปใŒ็ต‚ไบ†ๆ™‚ๅˆปใซ้”ใ—ใŸๆ™‚ใฎๅ‡ฆ็†ๆ–นๆณ•ใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚
+            choices:
+              unbounded: ๅ†็”Ÿใ‚’็ถšใ‘ใ‚‹
+              clamped: ๅœๆญขใ™ใ‚‹
+              bounced: ใƒใ‚ฆใƒณใ‚น
+      googleAnalytics:
+        title: Google Analytics
+        description: Google Analyticsใ‚’ๆœ‰ๅŠนใซใ™ใ‚‹ใ“ใจใงใ€ๅ…ฌ้–‹ใƒšใƒผใ‚ธใŒใฉใฎใ‚ˆใ†ใซ้–ฒ่ฆงใ•ใ‚Œใฆใ„ใ‚‹ใ‹ใ‚’ๅˆ†ๆžใ™ใ‚‹ใ“ใจใŒๅฏ่ƒฝใงใ™ใ€‚
+        fields:
+          enableGA:
+            title: ๆœ‰ๅŠน
+            description: Google Analyticsใ‚’ๆœ‰ๅŠนใซใ—ใพใ™ใ€‚
+          trackingCode:
+            title: 
ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐID + description: ใ“ใ“ใซใ‚ฐใƒผใ‚ฐใƒซใ‚ขใƒŠใƒชใƒ†ใ‚ฃใ‚ฏใ‚นใฎใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐIDใ‚’่ฒผใ‚Šไป˜ใ‘ใ‚‹ใ“ใจใงใ€ๅ…ฌ้–‹ใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใซใ“ใฎใ‚ณใƒผใƒ‰ใŒๅŸ‹ใ‚่พผใพใ‚Œใพใ™ใ€‚ + theme: + title: ๅ…ฌ้–‹็”จใƒ†ใƒผใƒž + description: ๅ…ฌ้–‹็”จใฎใƒ†ใƒผใƒžใ‚’่จญๅฎšใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + fields: + themeType: + title: ใƒ†ใƒผใƒž + description: ใƒ†ใƒผใƒžใฎ็จฎ้กžใ‚’้ธๆŠžใ—ใพใ™ใ€‚ใ‚ˆใ‚Š็ดฐใ‹ใใƒ†ใƒผใƒžใ‚’ๆŒ‡ๅฎšใ—ใŸใ„ๅ ดๅˆใฏใ‚ซใ‚นใ‚ฟใƒ ใ‚’้ธๆŠžใ—ใพใ™ใ€‚ + choices: + dark: Re:Earth ใƒ€ใƒผใ‚ฏ + light: Re:Earth ใƒฉใ‚คใƒˆ + forest: ๆฃฎ + custom: ใ‚ซใ‚นใ‚ฟใƒ  + themeTextColor: + title: ๆ–‡ๅญ—่‰ฒ + description: ๆ–‡ๅญ—่‰ฒใ‚’่จญๅฎšใ—ใพใ™ใ€‚ + themeSelectColor: + title: ้ธๆŠž่‰ฒ + description: ้ธๆŠž่‰ฒใ‚’่จญๅฎšใ—ใพใ™ใ€‚ + themeBackgroundColor: + title: ่ƒŒๆ™ฏ่‰ฒ + description: ่ƒŒๆ™ฏ่‰ฒใ‚’่จญๅฎšใ—ใพใ™ใ€‚ + infobox: + name: ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น + description: ้–ฒ่ฆง่€…ใŒๅœฐๅ›ณไธŠใฎใƒฌใ‚คใƒคใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ—ใŸๆ™‚ใซ่กจ็คบใ•ใ‚Œใ‚‹ใƒœใƒƒใ‚ฏใ‚นใงใ™ใ€‚ใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ€ๅ‹•็”ปใชใฉใฎใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + propertySchema: + default: + title: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + fields: + title: + title: ใ‚ฟใ‚คใƒˆใƒซ + showTitle: + title: ใ‚ฟใ‚คใƒˆใƒซ่กจ็คบ + position: + title: ไฝ็ฝฎ + choices: + right: ๅณ + middle: ไธญๅคฎ + left: ๅทฆ + size: + title: ใ‚ตใ‚คใ‚บ + choices: + small: ๅฐ + medium: ไธญ + large: ๅคง + heightType: + title: ้ซ˜ใ•่จญๅฎš + choices: + auto: ่‡ชๅ‹• + manual: ๆ‰‹ๅ‹• + height: + title: ้ซ˜ใ• + description: ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚นใฎ้ซ˜ใ•ใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ284pxใƒปๆœ€ๅคง2048px + infoboxPaddingTop: + title: ไฝ™็™ฝไธŠ + description: "ไธŠ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40px" + infoboxPaddingBottom: + title: ไฝ™็™ฝไธ‹ + description: "ไธ‹็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40px" + infoboxPaddingLeft: + title: ไฝ™็™ฝๅทฆ + description: "ๅทฆ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40px" + infoboxPaddingRight: + title: ไฝ™็™ฝๅณ + 
description: "ๅณ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐpxใƒปๆœ€ๅคง40px" + bgcolor: + title: ่ƒŒๆ™ฏ่‰ฒ + outlineWidth: + title: ็ทšๅน… + outlineColor: + title: ็ทš่‰ฒ + useMask: + title: ่ƒŒๆ™ฏใ‚ชใƒผใƒใƒผใƒฌใ‚ค + typography: + title: ใƒ•ใ‚ฉใƒณใƒˆ + marker: + name: ใƒžใƒผใ‚ซใƒผ + description: ใƒžใƒผใ‚ซใƒผใซใฏใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ‚’็ดใฅใ‘ใ‚‹ใ“ใจใŒใงใใ€้–ฒ่ฆง่€…ใฏใƒžใƒผใ‚ซใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ™ใ‚‹ใ“ใจใงใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่ฆ‹ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + propertySchema: + default: + title: ใƒžใƒผใ‚ซใƒผ + fields: + location: + title: ไฝ็ฝฎ + height: + title: ้ซ˜ๅบฆ + heightReference: + title: ้ซ˜ๅบฆใฎๅŸบๆบ– + description: ๅœฐๅฝขใ‚’ๅŸบๆบ–ใจใ—ใŸ็›ธๅฏพ็š„ใช้ซ˜ๅบฆใซใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใงใฏWGS84ๆฅ•ๅ††ไฝ“ใ‹ใ‚‰ใฎ็ตถๅฏพ็š„ใช้ซ˜ๅบฆใซใชใ‚Šใพใ™ใ€‚ + choices: + none: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + clamp: ๅœฐ่กจใซๅ›บๅฎš + relative: ๅœฐ่กจใ‹ใ‚‰ใฎ้ซ˜ๅบฆ + style: + title: ่กจ็คบๆ–นๆณ• + choices: + point: ็‚น + image: ใ‚ขใ‚คใ‚ณใƒณ + pointColor: + title: ็‚นใฎ่‰ฒ + pointSize: + title: ็‚นใฎๅคงใใ• + pointOutlineColor: + title: ็‚นใฎ็ทšใฎ่‰ฒ + pointOutlineWidth: + title: ็‚นใฎ็ทšใฎๅน… + image: + title: ็”ปๅƒURL + imageSize: + title: ็”ปๅƒใ‚ตใ‚คใ‚บ + imageColor: + title: ็”ปๅƒใฎ่‰ฒ + imageCrop: + title: ๅˆ‡ใ‚ŠๆŠœใ + choices: + none: ใชใ— + circle: ๅ††ๅฝข + imageHorizontalOrigin: + title: ็”ปๅƒใฎไธญๅฟƒๆจช + choices: + left: ๅทฆ + center: ไธญๅคฎ + right: ๅณ + imageVerticalOrigin: + title: ็”ปๅƒใฎไธญๅฟƒ็ธฆ + choices: + top: ไธŠ + center: ไธญๅคฎ + baseline: ใƒ™ใƒผใ‚นใƒฉใ‚คใƒณ + bottom: ไธ‹ + imageShadow: + title: ็”ปๅƒใฎๅฝฑ + imageShadowColor: + title: ็”ปๅƒใฎๅฝฑ่‰ฒ + imageShadowBlur: + title: ็”ปๅƒใฎๅฝฑๅŠๅพ„ + imageShadowPositionX: + title: ็”ปๅƒใฎๅฝฑX + imageShadowPositionY: + title: ็”ปๅƒใฎๅฝฑY + label: + title: ใƒฉใƒ™ใƒซ + labelText: + title: ใƒฉใƒ™ใƒซใฎๆ–‡ๅญ— + labelPosition: + title: ใƒฉใƒ™ใƒซใฎไฝ็ฝฎ + choices: + right: ๅณ + left: ๅทฆ + top: ไธŠ + bottom: ไธ‹ + righttop: ๅณไธŠ + rightbottom: ๅณไธ‹ + 
lefttop: ๅทฆไธŠ + leftbottom: ๅทฆไธ‹ + labelTypography: + title: ใƒฉใƒ™ใƒซใƒ•ใ‚ฉใƒณใƒˆ + labelBackground: + title: ใƒฉใƒ™ใƒซใฎ่ƒŒๆ™ฏ + extrude: + title: ๅœฐ้ขใ‹ใ‚‰็ทšใ‚’ไผธใฐใ™ + polyline: + name: ็›ด็ทš + propertySchema: + default: + title: ็›ด็ทš + fields: + coordinates: + title: ้ ‚็‚น + strokeColor: + title: ็ทš่‰ฒ + strokeWidth: + title: ็ทšๅน… + polygon: + name: ใƒใƒชใ‚ดใƒณ + propertySchema: + default: + title: ใƒใƒชใ‚ดใƒณ + fields: + polygon: + title: ใƒใƒชใ‚ดใƒณ + fill: + title: ๅก—ใ‚Š + fillColor: + title: ๅก—ใ‚Š่‰ฒ + stroke: + title: ็ทš + strokeColor: + title: ็ทš่‰ฒ + strokeWidth: + title: ็ทšๅน… + rect: + name: ้•ทๆ–นๅฝข + propertySchema: + default: + title: ้•ทๆ–นๅฝข + fields: + rect: + title: ้•ทๆ–นๅฝข + height: + title: ้ซ˜ๅบฆ + heightReference: + title: ้ซ˜ๅบฆใฎๅŸบๆบ– + description: ๅœฐๅฝขใ‚’ๅŸบๆบ–ใจใ—ใŸ็›ธๅฏพ็š„ใช้ซ˜ๅบฆใซใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใงใฏWGS84ๆฅ•ๅ††ไฝ“ใ‹ใ‚‰ใฎ็ตถๅฏพ็š„ใช้ซ˜ๅบฆใซใชใ‚Šใพใ™ใ€‚ + choices: + none: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + clamp: ๅœฐ่กจใซๅ›บๅฎš + relative: ๅœฐ่กจใ‹ใ‚‰ใฎ้ซ˜ๅบฆ + extrudedHeight: + title: ้ซ˜ใ• + style: + title: ใ‚นใ‚ฟใ‚คใƒซ + choices: + color: ่‰ฒ + image: ็”ปๅƒ + fillColor: + title: ๅก—ใ‚Š่‰ฒ + image: + title: ็”ปๅƒURL + outlineColor: + title: ็ทšใฎ่‰ฒ + outlineWidth: + title: ็ทšใฎๅน… + shadows: + title: ๅฝฑ + description: ๅฝฑใ‚’่กจ็คบใ—ใพใ™ใ€‚ๅฟ…ใšใ‚ทใƒผใƒณใฎๅฝฑใฎ่จญๅฎšใ‚‚ๆœ‰ๅŠนใซใ—ใฆใใ ใ•ใ„ใ€‚ใƒฌใƒณใƒ€ใƒชใƒณใ‚ฐใŒ้‡ใใชใ‚‹ใ“ใจใŒใ‚ใ‚Šใพใ™ใ€‚ + choices: + disabled: ็„กๅŠน + enabled: ๆœ‰ๅŠน + cast_only: ๅฝฑใ‚’่ฝใจใ™ใ ใ‘ + receive_only: ๅฝฑใ‚’่ฝใจใ•ใ‚Œใ‚‹ใ ใ‘ + photooverlay: + name: ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚ค + description: ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚คใ‚’้ธๆŠžใ™ใ‚‹ใจใ€่จญๅฎšใ—ใŸ็”ปๅƒใ‚’ใƒขใƒผใƒ€ใƒซๅฝขๅผใง็”ป้ขไธŠใซ่ขซใ›ใฆ่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + propertySchema: + default: + title: ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚ค + fields: + location: + title: ไฝ็ฝฎ + height: + title: ้ซ˜ๅบฆ + heightReference: + title: 
้ซ˜ๅบฆใฎๅŸบๆบ– + description: ๅœฐๅฝขใ‚’ๅŸบๆบ–ใจใ—ใŸ็›ธๅฏพ็š„ใช้ซ˜ๅบฆใซใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใงใฏWGS84ๆฅ•ๅ††ไฝ“ใ‹ใ‚‰ใฎ็ตถๅฏพ็š„ใช้ซ˜ๅบฆใซใชใ‚Šใพใ™ใ€‚ + choices: + none: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + clamp: ๅœฐ่กจใซๅ›บๅฎš + relative: ๅœฐ่กจใ‹ใ‚‰ใฎ้ซ˜ๅบฆ + camera: + title: ใ‚ซใƒกใƒฉ + description: ใ‚ฏใƒชใƒƒใ‚ฏใ•ใ‚ŒใŸใจใใซ็งปๅ‹•ใ™ใ‚‹ใ‚ซใƒกใƒฉใฎ่จญๅฎšใ‚’ใ—ใพใ™ใ€‚ + image: + title: ใ‚ขใ‚คใ‚ณใƒณ + imageSize: + title: ใ‚ขใ‚คใ‚ณใƒณใ‚ตใ‚คใ‚บ + imageHorizontalOrigin: + title: ใ‚ขใ‚คใ‚ณใƒณใฎไธญๅฟƒๆจช + choices: + left: ๅทฆ + center: ไธญๅคฎ + right: ๅณ + imageVerticalOrigin: + title: ใ‚ขใ‚คใ‚ณใƒณใฎไธญๅฟƒ็ธฆ + choices: + top: ไธŠ + center: ไธญๅคฎ + baseline: ใƒ™ใƒผใ‚นใƒฉใ‚คใƒณ + bottom: ไธ‹ + imageCrop: + title: ๅˆ‡ใ‚ŠๆŠœใ + choices: + none: ใชใ— + circle: ๅ††ๅฝข + imageShadow: + title: ใ‚ขใ‚คใ‚ณใƒณใฎๅฝฑ + imageShadowColor: + title: ๅฝฑ่‰ฒ + imageShadowBlur: + title: ๅฝฑๅŠๅพ„ + imageShadowPositionX: + title: ๅฝฑX + imageShadowPositionY: + title: ๅฝฑY + photoOverlayImage: + title: ใ‚ชใƒผใƒใƒฌใ‚ค็”ปๅƒ + photoOverlayDescription: + title: ใ‚ชใƒผใƒใƒฌใ‚ค็”ปๅƒใฎ่ชฌๆ˜Ž + ellipsoid: + name: ็ƒไฝ“ + description: ็ซ‹ไฝ“็š„ใช็ƒไฝ“ใ‚’ๅœฐๅ›ณไธŠใซ่กจ็คบใงใใพใ™ใ€‚ + propertySchema: + default: + title: ็ƒไฝ“ + fields: + position: + title: ไฝ็ฝฎ + height: + title: ้ซ˜ๅบฆ + heightReference: + title: ้ซ˜ๅบฆใฎๅŸบๆบ– + description: ๅœฐๅฝขใ‚’ๅŸบๆบ–ใจใ—ใŸ็›ธๅฏพ็š„ใช้ซ˜ๅบฆใซใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใงใฏWGS84ๆฅ•ๅ††ไฝ“ใ‹ใ‚‰ใฎ็ตถๅฏพ็š„ใช้ซ˜ๅบฆใซใชใ‚Šใพใ™ใ€‚ + choices: + none: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + clamp: ๅœฐ่กจใซๅ›บๅฎš + relative: ๅœฐ่กจใ‹ใ‚‰ใฎ้ซ˜ๅบฆ + radius: + title: ๅŠๅพ„ + fillColor: + title: ๅก—ใ‚Š่‰ฒ + shadows: + title: ๅฝฑ + description: ๅฝฑใ‚’่กจ็คบใ—ใพใ™ใ€‚ๅฟ…ใšใ‚ทใƒผใƒณใฎๅฝฑใฎ่จญๅฎšใ‚‚ๆœ‰ๅŠนใซใ—ใฆใใ ใ•ใ„ใ€‚ใƒฌใƒณใƒ€ใƒชใƒณใ‚ฐใŒ้‡ใใชใ‚‹ใ“ใจใŒใ‚ใ‚Šใพใ™ใ€‚ + choices: + disabled: ็„กๅŠน + enabled: ๆœ‰ๅŠน + cast_only: ๅฝฑใ‚’่ฝใจใ™ใ ใ‘ + receive_only: ๅฝฑใ‚’่ฝใจใ•ใ‚Œใ‚‹ใ 
ใ‘ + model: + name: 3Dใƒขใƒ‡ใƒซ + description: glTFๅฝขๅผใฎ3Dใƒขใƒ‡ใƒซใ‚’่ชญใฟ่พผใ‚“ใง่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ ๅพŒใ€URLใ‚’ๆŒ‡ๅฎšใ™ใ‚‹ใ“ใจใง่ชญใฟ่พผใพใ‚Œใพใ™ใ€‚ + propertySchema: + default: + title: 3Dใƒขใƒ‡ใƒซ + fields: + model: + title: URL + description: glTFๅฝขๅผใฎใฟๅฏพๅฟœใ—ใฆใ„ใพใ™ใ€‚ + location: + title: ไฝ็ฝฎ + height: + title: ้ซ˜ๅบฆ + heightReference: + title: ้ซ˜ๅบฆใฎๅŸบๆบ– + description: ๅœฐๅฝขใ‚’ๅŸบๆบ–ใจใ—ใŸ็›ธๅฏพ็š„ใช้ซ˜ๅบฆใซใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใงใฏWGS84ๆฅ•ๅ††ไฝ“ใ‹ใ‚‰ใฎ็ตถๅฏพ็š„ใช้ซ˜ๅบฆใซใชใ‚Šใพใ™ใ€‚ + choices: + none: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + clamp: ๅœฐ่กจใซๅ›บๅฎš + relative: ๅœฐ่กจใ‹ใ‚‰ใฎ้ซ˜ๅบฆ + heading: + title: ใƒ˜ใƒƒใƒ‡ใ‚ฃใƒณใ‚ฐ + pitch: + title: ใƒ”ใƒƒใƒ + roll: + title: ใƒญใƒผใƒซ + scale: + title: ใ‚นใ‚ฑใƒผใƒซ + description: 3Dใƒขใƒ‡ใƒซใ‚’่จญๅฎšๅ€คๅˆ†็ญ‰ๅ€ใ—ใฆ่กจ็คบใ—ใพใ™ใ€‚ + maximumScale: + title: ๆœ€ๅคงใ‚นใ‚ฑใƒผใƒซ + minimumPixelSize: + title: ๆœ€ๅฐ่กจ็คบใ‚ตใ‚คใ‚บ + animation: + title: ใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณ + description: ใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณใ‚’ๆœ‰ๅŠนใซใ—ใพใ™ใ€‚ๅฟ…ใšใ‚ทใƒผใƒณใฎใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณใฎ่จญๅฎšใ‚‚ๆœ‰ๅŠนใซใ—ใฆใใ ใ•ใ„ใ€‚ + appearance: + title: ๅค–่ฆณ + fields: + shadows: + title: ๅฝฑ + description: ๅฝฑใ‚’่กจ็คบใ—ใพใ™ใ€‚ๅฟ…ใšใ‚ทใƒผใƒณใฎๅฝฑใฎ่จญๅฎšใ‚‚ๆœ‰ๅŠนใซใ—ใฆใใ ใ•ใ„ใ€‚ใƒฌใƒณใƒ€ใƒชใƒณใ‚ฐใŒ้‡ใใชใ‚‹ใ“ใจใŒใ‚ใ‚Šใพใ™ใ€‚ + choices: + disabled: ็„กๅŠน + enabled: ๆœ‰ๅŠน + cast_only: ๅฝฑใ‚’่ฝใจใ™ใ ใ‘ + receive_only: ๅฝฑใ‚’่ฝใจใ•ใ‚Œใ‚‹ใ ใ‘ + colorBlend: + title: ่‰ฒใฎใƒ–ใƒฌใƒณใƒ‰ + choices: + none: ็„กๅŠน + highlight: ใƒใ‚คใƒฉใ‚คใƒˆ + replace: ็ฝฎใๆ›ใˆ + mix: ๆททๅˆ + color: + title: ่‰ฒ + colorBlendAmount: + title: ่‰ฒใฎๆททๅˆ้‡ + lightColor: + title: ็…งๆ˜Ž่‰ฒ + silhouette: + title: ใ‚ทใƒซใ‚จใƒƒใƒˆ + silhouetteColor: + title: ใ‚ทใƒซใ‚จใƒƒใƒˆ่‰ฒ + silhouetteSize: + title: ใ‚ทใƒซใ‚จใƒƒใƒˆใ‚ตใ‚คใ‚บ + tileset: + name: 3Dใ‚ฟใ‚คใƒซ + description: 3D 
Tilesๅฝขๅผใฎ3Dใ‚ฟใ‚คใƒซใ‚’่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + propertySchema: + default: + title: 3Dใ‚ฟใ‚คใƒซ + fields: + tileset: + title: ใ‚ฟใ‚คใƒซใ‚ปใƒƒใƒˆURL + description: 3Dใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟๅ†…ใฎ tileset.json ใฎURL + styleUrl: + title: ใ‚นใ‚ฟใ‚คใƒซURL + description: 3D Tiles styles ใŒ่จ˜่ฟฐใ•ใ‚ŒใŸJSONใฎURLใ€‚ใ‚นใ‚ฟใ‚คใƒซใ‚’้ฉ็”จใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚่จญๅฎšใฏไปปๆ„ใงใ™ใ€‚ + shadows: + title: ๅฝฑ + description: ๅฝฑใ‚’่กจ็คบใ—ใพใ™ใ€‚ๅฟ…ใšใ‚ทใƒผใƒณใฎๅฝฑใฎ่จญๅฎšใ‚‚ๆœ‰ๅŠนใซใ—ใฆใใ ใ•ใ„ใ€‚ใƒฌใƒณใƒ€ใƒชใƒณใ‚ฐใŒ้‡ใใชใ‚‹ใ“ใจใŒใ‚ใ‚Šใพใ™ใ€‚ + choices: + disabled: ็„กๅŠน + enabled: ๆœ‰ๅŠน + cast_only: ๅฝฑใ‚’่ฝใจใ™ใ ใ‘ + receive_only: ๅฝฑใ‚’่ฝใจใ•ใ‚Œใ‚‹ใ ใ‘ + resource: + name: ใƒ•ใ‚กใ‚คใƒซ + description: ๅค–้ƒจใ‹ใ‚‰ใƒ‡ใƒผใ‚ฟใ‚’ใ‚คใƒณใƒใƒผใƒˆใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ ๅพŒใ€URLใ‚’ๆŒ‡ๅฎšใ™ใ‚‹ใ“ใจใงๅค–้ƒจใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใพใ‚Œใพใ™ใ€‚ + propertySchema: + default: + title: ใƒ•ใ‚กใ‚คใƒซ + fields: + url: + title: ใƒ•ใ‚กใ‚คใƒซ URL + choices: + auto: ่‡ชๅ‹• + kml: KML + geojson: GeoJSON / TopoJSON + czml: CZML + clampToGround: + title: ๅœฐ่กจใซๅ›บๅฎš + description: ๅœฐ่กจใฎๅ‡นๅ‡ธใซๆฒฟใ†ใ‚ˆใ†ใซใƒใƒชใ‚ดใƒณใชใฉใฎใƒ‡ใƒผใ‚ฟใ‚’่กจ็คบใ—ใพใ™ใ€‚ + textblock: + name: ใƒ†ใ‚ญใ‚นใƒˆ + description: Text block + propertySchema: + default: + title: ใƒ†ใ‚ญใ‚นใƒˆใƒ–ใƒญใƒƒใ‚ฏ + fields: + title: + title: ใ‚ฟใ‚คใƒˆใƒซ + text: + title: ใ‚ณใƒณใƒ†ใƒณใƒ„ + markdown: + title: ใƒžใƒผใ‚ฏใƒ€ใ‚ฆใƒณ + paddingTop: + title: ไฝ™็™ฝไธŠ + description: "ไธŠ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40ox" + paddingBottom: + title: ไฝ™็™ฝไธ‹ + description: "ไธ‹็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40ox" + paddingLeft: + title: ไฝ™็™ฝๅทฆ + description: "ๅทฆ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40ox" + paddingRight: + title: ไฝ™็™ฝๅณ + description: "ๅณ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40ox" + typography: + title: ใƒ•ใ‚ฉใƒณใƒˆ + imageblock: + name: ็”ปๅƒ + 
description: ็”ปๅƒใƒ–ใƒญใƒƒใ‚ฏ + propertySchema: + default: + title: ็”ปๅƒใƒ–ใƒญใƒƒใ‚ฏ + fields: + image: + title: ็”ปๅƒ + title: + title: ใ‚ฟใ‚คใƒˆใƒซ + fullSize: + title: ใƒ•ใƒซใ‚ตใ‚คใ‚บ + imageSize: + title: ็”ปๅƒใ‚ตใ‚คใ‚บ + choices: + cover: ใ‚ซใƒใƒผ + contain: ๅซใ‚€ + imagePositionX: + title: ๆฐดๅนณไฝ็ฝฎ + choices: + left: ๅทฆ + center: ไธญๅคฎ + right: ๅณ + imagePositionY: + title: ๅž‚็›ดไฝ็ฝฎ + choices: + top: ไธŠ + center: ไธญๅคฎ + bottom: ไธ‹ + videoblock: + name: ๅ‹•็”ป + description: ๅ‹•็”ปใƒ–ใƒญใƒƒใ‚ฏ + propertySchema: + default: + title: ๅ‹•็”ปใƒ–ใƒญใƒƒใ‚ฏ + fields: + url: + title: ๅ‹•็”ป URL + title: + title: ใ‚ฟใ‚คใƒˆใƒซ + fullSize: + title: ใƒ•ใƒซใ‚ตใ‚คใ‚บ + locationblock: + name: ไฝ็ฝฎๆƒ…ๅ ฑ + description: ไฝ็ฝฎๆƒ…ๅ ฑใƒ–ใƒญใƒƒใ‚ฏ + propertySchema: + default: + title: ไฝ็ฝฎๆƒ…ๅ ฑใƒ–ใƒญใƒƒใ‚ฏ + fields: + location: + title: ไฝ็ฝฎๆƒ…ๅ ฑ + title: + title: ใ‚ฟใ‚คใƒˆใƒซ + fullSize: + title: ใƒ•ใƒซใ‚ตใ‚คใ‚บ + dlblock: + name: ่กจ + description: ่กจใƒ–ใƒญใƒƒใ‚ฏ + propertySchema: + default: + title: ่กจใƒ–ใƒญใƒƒใ‚ฏ + fields: + title: + title: ใ‚ฟใ‚คใƒˆใƒซ + typography: + title: ใƒ•ใ‚ฉใƒณใƒˆ + items: + title: ใ‚ขใ‚คใƒ†ใƒ  + fields: + item_title: + title: ใ‚ฟใ‚คใƒˆใƒซ + item_datatype: + title: ็จฎ้กž + choices: + string: ๆ–‡ๅญ— + number: ๆ•ฐๅญ— + item_datastr: + title: ใƒ‡ใƒผใ‚ฟ(ๆ–‡ๅญ—) + item_datanum: + title: ใƒ‡ใƒผใ‚ฟ(ๆ•ฐๅญ—) + menu: + name: ใƒกใƒ‹ใƒฅใƒผ (ๅปƒๆญขไบˆๅฎš) + description: | + ใ‚ทใƒผใƒณใซใƒœใ‚ฟใƒณใ‚’่จญ็ฝฎใ—ใ€ใƒกใƒ‹ใƒฅใƒผใ‚’่กจ็คบใ—ใพใ™ใ€‚่ฟฝๅŠ ใ—ใŸใƒœใ‚ฟใƒณใซ่จญๅฎšใ•ใ‚ŒใŸใ‚ขใ‚ฏใ‚ทใƒงใƒณใ‚ฟใ‚คใƒ—ใซใ‚ˆใฃใฆๅ‹•ไฝœใŒๅค‰ใ‚ใ‚Šใพใ™ใ€‚ + ใƒปใƒชใƒณใ‚ฏ๏ผšใƒœใ‚ฟใƒณ่‡ชไฝ“ใŒๅค–้ƒจใ‚ตใ‚คใƒˆใธใฎใƒชใƒณใ‚ฏใซใชใ‚Šใพใ™ใ€‚ + ใƒปใƒกใƒ‹ใƒฅใƒผ๏ผš่ฟฝๅŠ ใ—ใŸใƒกใƒ‹ใƒฅใƒผใ‚’้–‹ใใพใ™ใ€‚ + ใƒปใ‚ซใƒกใƒฉใ‚ขใ‚ฏใ‚ทใƒงใƒณ๏ผšใ‚ฏใƒชใƒƒใ‚ฏๆ™‚ใซใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ—ใพใ™ใ€‚ + propertySchema: + buttons: + title: ใƒœใ‚ฟใƒณ + fields: + buttonInvisible: + title: ้ž่กจ็คบ + buttonTitle: + title: ใ‚ฟใ‚คใƒˆใƒซ + buttonPosition: + title: 
่กจ็คบไฝ็ฝฎ + choices: + topleft: ไธŠๅทฆ + topright: ไธŠๅณ + bottomleft: ไธ‹ๅทฆ + bottomright: ไธ‹ๅณ + buttonStyle: + title: ่กจ็คบๆ–นๆณ• + choices: + text: ใƒ†ใ‚ญใ‚นใƒˆใฎใฟ + icon: ใ‚ขใ‚คใ‚ณใƒณใฎใฟ + texticon: ใƒ†ใ‚ญใ‚นใƒˆ๏ผ‹ใ‚ขใ‚คใ‚ณใƒณ + buttonIcon: + title: ใ‚ขใ‚คใ‚ณใƒณ + buttonColor: + title: ใƒ†ใ‚ญใ‚นใƒˆ่‰ฒ + buttonBgcolor: + title: ่ƒŒๆ™ฏ่‰ฒ + buttonType: + title: ใ‚ขใ‚ฏใ‚ทใƒงใƒณ + choices: + link: ใƒชใƒณใ‚ฏ + menu: ใƒกใƒ‹ใƒฅใƒผ้–‹้–‰ + camera: ใ‚ซใƒกใƒฉ็งปๅ‹• + buttonLink: + title: ใƒชใƒณใ‚ฏ + buttonCamera: + title: ใ‚ซใƒกใƒฉ + menu: + title: ใƒกใƒ‹ใƒฅใƒผ + fields: + menuTitle: + title: ใ‚ฟใ‚คใƒˆใƒซ + menuIcon: + title: ใ‚ขใ‚คใ‚ณใƒณ + menuType: + title: ใ‚ขใ‚ฏใ‚ทใƒงใƒณ + choices: + link: ใƒชใƒณใ‚ฏ + camera: ใ‚ซใƒกใƒฉ็งปๅ‹• + border: ๅŒบๅˆ‡ใ‚Š็ทš + menuLink: + title: ใƒชใƒณใ‚ฏ + menuCamera: + title: ใ‚ซใƒกใƒฉ + button: + name: ใƒœใ‚ฟใƒณ + description: | + ใ‚ทใƒผใƒณใซใƒœใ‚ฟใƒณใ‚’่จญ็ฝฎใ—ใ€ใƒกใƒ‹ใƒฅใƒผใ‚’่กจ็คบใ—ใพใ™ใ€‚่ฟฝๅŠ ใ—ใŸใƒœใ‚ฟใƒณใซ่จญๅฎšใ•ใ‚ŒใŸใ‚ขใ‚ฏใ‚ทใƒงใƒณใ‚ฟใ‚คใƒ—ใซใ‚ˆใฃใฆๅ‹•ไฝœใŒๅค‰ใ‚ใ‚Šใพใ™ใ€‚ + ใ€€ใƒปใƒชใƒณใ‚ฏ๏ผšใƒœใ‚ฟใƒณ่‡ชไฝ“ใŒๅค–้ƒจใ‚ตใ‚คใƒˆใธใฎใƒชใƒณใ‚ฏใซใชใ‚Šใพใ™ใ€‚ + ใ€€ใƒปใƒกใƒ‹ใƒฅใƒผ๏ผš่ฟฝๅŠ ใ—ใŸใƒกใƒ‹ใƒฅใƒผใ‚’้–‹ใใพใ™ใ€‚ + ใ€€ใƒปใ‚ซใƒกใƒฉใ‚ขใ‚ฏใ‚ทใƒงใƒณ๏ผšใ‚ฏใƒชใƒƒใ‚ฏๆ™‚ใซใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ—ใพใ™ใ€‚ + propertySchema: + default: + title: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + fields: + buttonTitle: + title: ใ‚ฟใ‚คใƒˆใƒซ + buttonStyle: + title: ่กจ็คบๆ–นๆณ• + choices: + text: ใƒ†ใ‚ญใ‚นใƒˆใฎใฟ + icon: ใ‚ขใ‚คใ‚ณใƒณใฎใฟ + texticon: ใƒ†ใ‚ญใ‚นใƒˆ๏ผ‹ใ‚ขใ‚คใ‚ณใƒณ + buttonIcon: + title: ใ‚ขใ‚คใ‚ณใƒณ + buttonColor: + title: ใƒ†ใ‚ญใ‚นใƒˆ่‰ฒ + buttonBgcolor: + title: ่ƒŒๆ™ฏ่‰ฒ + buttonType: + title: ใ‚ขใ‚ฏใ‚ทใƒงใƒณ + choices: + link: ใƒชใƒณใ‚ฏ + menu: ใƒกใƒ‹ใƒฅใƒผ้–‹้–‰ + camera: ใ‚ซใƒกใƒฉ็งปๅ‹• + buttonLink: + title: ใƒชใƒณใ‚ฏ + buttonCamera: + title: ใ‚ซใƒกใƒฉ + menu: + title: ใƒกใƒ‹ใƒฅใƒผ + fields: + menuTitle: + title: ใ‚ฟใ‚คใƒˆใƒซ + menuIcon: + title: ใ‚ขใ‚คใ‚ณใƒณ + menuType: + title: ใ‚ขใ‚ฏใ‚ทใƒงใƒณ + choices: + link: 
ใƒชใƒณใ‚ฏ + camera: ใ‚ซใƒกใƒฉ็งปๅ‹• + border: ๅŒบๅˆ‡ใ‚Š็ทš + menuLink: + title: ใƒชใƒณใ‚ฏ + menuCamera: + title: ใ‚ซใƒกใƒฉ + splashscreen: + name: ใ‚นใƒ—ใƒฉใƒƒใ‚ทใƒฅใ‚นใ‚ฏใƒชใƒผใƒณ + description: ใƒšใƒผใ‚ธใƒญใƒผใƒ‰ๅพŒใ€ๆœ€ๅˆใซ่กจ็คบใ•ใ‚Œใ‚‹ๆผ”ๅ‡บใ‚’่จญๅฎšใงใใพใ™ใ€‚ไพ‹ใˆใฐใ€ใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใฎใ‚ฟใ‚คใƒˆใƒซใ‚’้–ฒ่ฆง่€…ใซ่ฆ‹ใ›ใŸใ‚Šใ€ใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ•ใ›ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + propertySchema: + overlay: + title: ใ‚ชใƒผใƒใƒผใƒฌใ‚ค + fields: + overlayEnabled: + title: ๆœ‰ๅŠน + overlayDelay: + title: ้–‹ๅง‹ๆ™‚้–“ + overlayDuration: + title: ่กจ็คบๆ™‚้–“ + overlayTransitionDuration: + title: ใƒ•ใ‚งใƒผใƒ‰ๆ™‚้–“ + overlayImage: + title: ใ‚ชใƒผใƒใƒผใƒฌใ‚ค็”ปๅƒ + overlayImageW: + title: ็”ปๅƒๅน… + overlayImageH: + title: ็”ปๅƒ้ซ˜ใ• + overlayBgcolor: + title: ่ƒŒๆ™ฏ่‰ฒ + camera: + title: ใ‚ซใƒกใƒฉใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณ + fields: + cameraPosition: + title: ใ‚ซใƒกใƒฉไฝ็ฝฎ + cameraDuration: + title: ใ‚ซใƒกใƒฉ็งปๅ‹•ๆ™‚้–“ + cameraDelay: + title: ใ‚ซใƒกใƒฉๅพ…ๆฉŸๆ™‚้–“ + storytelling: + name: ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐ + description: ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐๆฉŸ่ƒฝใ‚’ไฝฟใˆใฐใ€ใƒ‡ใƒผใ‚ฟ้–“ใฎ็น‹ใŒใ‚Šใ‚„ๆ™‚็ณปๅˆ—ใ‚’ใ‚‚ใจใซใ€้ †็•ชใซ่ณ‡ๆ–™ใ‚’้–ฒ่ฆงใ—ใฆใ‚‚ใ‚‰ใ†ใ“ใจใŒๅฏ่ƒฝใงใ™ใ€‚ไฝฟ็”จใ™ใ‚‹ใซใฏใ€ๅณใƒ‘ใƒใƒซใ‹ใ‚‰ๅœฐ็ƒไธŠใฎใƒฌใ‚คใƒคใƒผใซ้ †็•ชใ‚’ไป˜ไธŽใ—ใพใ™ใ€‚ + propertySchema: + default: + title: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + fields: + duration: + title: ใ‚ซใƒกใƒฉ็งปๅ‹•ๆ™‚้–“ + range: + title: ็”ป่ง’ + camera: + title: ใ‚ซใƒกใƒฉ + autoStart: + title: ่‡ชๅ‹•ๅ†็”Ÿ + stories: + title: ใ‚นใƒˆใƒผใƒชใƒผ + fields: + layer: + title: ใƒฌใ‚คใƒคใƒผ + layerDuration: + title: ็งปๅ‹•ๆ™‚้–“ + layerRange: + title: ใ‚ซใƒกใƒฉ็”ป่ง’ + layerCamera: + title: ใ‚ซใƒกใƒฉ + cluster: + name: ใ‚ฏใƒฉใ‚นใ‚ฟ + description: ใƒฌใ‚คใƒคใƒผใ‚’่‡ชๅ‹•็š„ใซใพใจใ‚ใฆ่กจ็คบใ™ใ‚‹ใ“ใจใŒๅฏ่ƒฝใชใ‚ฏใƒฉใ‚นใ‚ฟใ‚’่จญๅฎšใ—ใพใ™ใ€‚ + propertySchema: + default: + title: ใ‚ฏใƒฉใ‚นใ‚ฟ + fields: + clusterPixelRange: + title: ๆœ€ๅฐ็ฏ„ๅ›ฒ + description: 
็”ป้ขไธŠใฎไฝ•ใƒ”ใ‚ฏใ‚ปใƒซๅˆ†ใฎ็ฏ„ๅ›ฒใซใ‚ใ‚‹ใƒฌใ‚คใƒคใƒผใ‚’ใ‚ฏใƒฉใ‚นใ‚ฟใซใพใจใ‚ใ‚‹ใ‹ใ‚’ๆœ€ๅฐๅ€คใงๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + clusterMinSize: + title: ๆœ€ๅฐใ‚ตใ‚คใ‚บ + description: ใ‚ฏใƒฉใ‚นใ‚ฟใŒ่กจ็คบใ•ใ‚Œใ‚‹ใฎใซๅฟ…่ฆใชๆœ€ๅฐใฎใƒฌใ‚คใƒคใƒผๆ•ฐ + clusterImage: + title: ็”ปๅƒ + description: ็”ป้ขไธŠใง่กจ็คบใ•ใ‚Œใ‚‹ใ‚ฏใƒฉใ‚นใ‚ฟใฎ็”ปๅƒใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + clusterImageWidth: + title: ็”ปๅƒๅน… + description: ็”ปๅƒใฎๅน…ใ‚’ใƒ”ใ‚ฏใ‚ปใƒซใงๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + clusterImageHeight: + title: ็”ปๅƒ้ซ˜ใ• + description: ็”ปๅƒใฎ้ซ˜ใ•ใ‚’ใƒ”ใ‚ฏใ‚ปใƒซใงๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + clusterLabelTypography: + title: ใƒฉใƒ™ใƒซ + description: ใƒฉใƒ™ใƒซใฎใ‚นใ‚ฟใ‚คใƒซใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + timeline: + name: ใ‚ฟใ‚คใƒ ใƒฉใ‚คใƒณ + description: ๆ™‚็ณปๅˆ—ใƒ‡ใƒผใ‚ฟใ‚’่กจ็คบใ™ใ‚‹ใŸใ‚ใซใ€ๆ™‚ๅˆปใ‚’ๅค‰ๆ›ดใ—ใŸใ‚Šๆ™‚้–“ใ‚’ๅ†็”Ÿใ—ใŸใ‚Šใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ diff --git a/server/pkg/builtin/migration.go b/server/pkg/builtin/migration.go new file mode 100644 index 000000000..ff11e348a --- /dev/null +++ b/server/pkg/builtin/migration.go @@ -0,0 +1,3 @@ +package builtin + +// TODO: migration code diff --git a/server/pkg/cache/cache.go b/server/pkg/cache/cache.go new file mode 100644 index 000000000..dee12b692 --- /dev/null +++ b/server/pkg/cache/cache.go @@ -0,0 +1,56 @@ +package cache + +import ( + "context" + "sync" + "time" +) + +// Cache holds data can be accessed synchronously. The data will be automatically updated when it expires. 
+type Cache[T any] struct { + updater func(context.Context, T) (T, error) + expiresIn time.Duration + updatedAt time.Time + lock sync.Mutex + data T + now func() time.Time +} + +func New[T any](updater func(context.Context, T) (T, error), expiresIn time.Duration) *Cache[T] { + return &Cache[T]{updater: updater, expiresIn: expiresIn} +} + +func (c *Cache[T]) Get(ctx context.Context) (res T, _ error) { + if c == nil { + return + } + + c.lock.Lock() + defer c.lock.Unlock() + + if c.updatedAt.IsZero() || c.updatedAt.Add(c.expiresIn).Before(c.currentTime()) { + if err := c.update(ctx); err != nil { + return c.data, err + } + } + return c.data, nil +} + +func (c *Cache[T]) update(ctx context.Context) error { + var err error + data, err := c.updater(ctx, c.data) + if err != nil { + return err + } + + c.data = data + c.updatedAt = c.currentTime() + return nil +} + +func (c *Cache[T]) currentTime() time.Time { + if c.now == nil { + return time.Now() + } + return c.now() +} diff --git a/server/pkg/cache/cache_test.go b/server/pkg/cache/cache_test.go new file mode 100644 index 000000000..30b149a62 --- /dev/null +++ b/server/pkg/cache/cache_test.go @@ -0,0 +1,81 @@ +package cache + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestCache_Get(t *testing.T) { + ctx := context.Background() + data := &struct{}{} + err := errors.New("err!") + var cache *Cache[*struct{}] + called := 0 + + res, e := cache.Get(ctx) // nil cache + assert.NoError(t, e) + assert.Nil(t, res) + + cache = New(func(c context.Context, i *struct{}) (*struct{}, error) { + assert.Same(t, ctx, c) + if called == 0 { + assert.Nil(t, i) + } else { + assert.Same(t, cache.data, i) + } + called++ + if called == 3 { + return data, err + } + return data, nil + }, time.Duration(0)) // duration 0 means data will be updated every time + + res, e = cache.Get(ctx) // first + assert.NoError(t, e) + assert.Same(t, data, res) + assert.Equal(t, 1, called) + + res, e = 
cache.Get(ctx) // second + assert.NoError(t, e) + assert.Same(t, data, res) + assert.Equal(t, 2, called) + + res, e = cache.Get(ctx) // third + assert.Same(t, err, e) + assert.Same(t, data, res) + assert.Equal(t, 3, called) +} + +func TestCache_Get2(t *testing.T) { + ctx := context.Background() + data := &struct{}{} + now := time.Date(2022, 6, 4, 0, 0, 0, 0, time.UTC) + called := 0 + + cache := New(func(_ context.Context, _ *struct{}) (*struct{}, error) { + called++ + return data, nil + }, time.Second) + cache.now = func() time.Time { return now } + + assert.Equal(t, 0, called) + _, _ = cache.Get(ctx) + assert.Equal(t, 1, called) + _, _ = cache.Get(ctx) + assert.Equal(t, 1, called) + now = now.Add(time.Millisecond) + _, _ = cache.Get(ctx) + assert.Equal(t, 1, called) + now = now.Add(time.Second) + _, _ = cache.Get(ctx) + assert.Equal(t, 2, called) + _, _ = cache.Get(ctx) + assert.Equal(t, 2, called) + now = now.Add(time.Second * 2) + _, _ = cache.Get(ctx) + assert.Equal(t, 3, called) +} diff --git a/server/pkg/config/config.go b/server/pkg/config/config.go new file mode 100644 index 000000000..a0f48115f --- /dev/null +++ b/server/pkg/config/config.go @@ -0,0 +1,29 @@ +package config + +import "sort" + +type Config struct { + Migration int64 + Auth *Auth +} + +type Auth struct { + Cert string + Key string +} + +func (c *Config) NextMigrations(migrations []int64) []int64 { + migrations2 := append([]int64{}, migrations...) 
+ sort.SliceStable(migrations2, func(i, j int) bool { return migrations2[i] < migrations2[j] }) + + for i, m := range migrations2 { + if len(migrations2) <= i { + return nil + } + if c.Migration < m { + return migrations2[i:] + } + } + + return nil +} diff --git a/server/pkg/config/config_test.go b/server/pkg/config/config_test.go new file mode 100644 index 000000000..5492aaebe --- /dev/null +++ b/server/pkg/config/config_test.go @@ -0,0 +1,14 @@ +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestConfigNextMigrations(t *testing.T) { + c := &Config{ + Migration: 100, + } + assert.Equal(t, []int64{200, 500}, c.NextMigrations([]int64{1, 100, 500, 200, 2})) +} diff --git a/server/pkg/czml/czml.go b/server/pkg/czml/czml.go new file mode 100644 index 000000000..bc6d24285 --- /dev/null +++ b/server/pkg/czml/czml.go @@ -0,0 +1,45 @@ +package czml + +type Feature struct { + Id string `json:"id"` + Name string `json:"name"` + Polygon *Polygon `json:"polygon,omitempty"` + Polyline *Polyline `json:"polyline,omitempty"` + Position *Position `json:"position,omitempty"` + Point *Point `json:"point,omitempty"` +} +type Polyline struct { + Positions Position `json:"positions"` + Material *Material `json:"material,omitempty"` + Width float64 `json:"width,omitempty"` +} +type Polygon struct { + Positions Position `json:"positions"` + Fill bool `json:"fill,omitempty"` + Material *Material `json:"material,omitempty"` + Stroke bool `json:"outline,omitempty"` + StrokeColor *Color `json:"outlineColor,omitempty"` + StrokeWidth float64 `json:"outlineWidth,omitempty"` +} +type Point struct { + Color string `json:"color,omitempty"` + PixelSize float64 `json:"pixelSize,omitempty"` +} +type Position struct { + CartographicDegrees []float64 `json:"cartographicDegrees"` +} +type Material struct { + SolidColor *SolidColor `json:"solidColor,omitempty"` + PolylineOutline *PolylineOutline `json:"polylineOutline,omitempty"` +} +type PolylineOutline struct { 
+ Color *Color `json:"color"` +} +type SolidColor struct { + Color *Color `json:"color"` +} +type Color struct { + RGBA []int64 `json:"rgba,omitempty"` + RGBAF []float64 `json:"rgbaf,omitempty"` + Reference string `json:"reference,omitempty"` +} diff --git a/server/pkg/dataset/builder.go b/server/pkg/dataset/builder.go new file mode 100644 index 000000000..7fb866b1e --- /dev/null +++ b/server/pkg/dataset/builder.go @@ -0,0 +1,82 @@ +package dataset + +type Builder struct { + d *Dataset +} + +func New() *Builder { + return &Builder{d: &Dataset{}} +} + +func (b *Builder) Build() (*Dataset, error) { + if b.d.id.IsNil() { + return nil, ErrInvalidID + } + if b.d.fields == nil || b.d.order == nil { + b.d.fields = map[FieldID]*Field{} + b.d.order = []FieldID{} + } + return b.d, nil +} + +func (b *Builder) MustBuild() *Dataset { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +func (b *Builder) ID(id ID) *Builder { + b.d.id = id + return b +} + +func (b *Builder) NewID() *Builder { + b.d.id = NewID() + return b +} + +func (b *Builder) Scene(scene SceneID) *Builder { + b.d.scene = scene + return b +} + +func (b *Builder) Source(source string) *Builder { + b.d.source = source + return b +} + +func (b *Builder) Schema(schema SchemaID) *Builder { + b.d.schema = schema + return b +} + +func (b *Builder) Fields(fields []*Field) *Builder { + b.d.fields = map[FieldID]*Field{} + b.d.order = make([]FieldID, 0, len(fields)) + + sources := map[string]struct{}{} + for _, f := range b.d.fields { + if source := f.Source(); source != "" { + sources[source] = struct{}{} + } + } + + for _, f := range fields { + if f.IsEmpty() { + continue + } + source := f.Source() + if source == "" { + b.d.fields[f.Field()] = f + b.d.order = append(b.d.order, f.Field()) + } else if _, ok := sources[source]; !ok { + b.d.fields[f.Field()] = f + b.d.order = append(b.d.order, f.Field()) + sources[source] = struct{}{} + } + } + + return b +} diff --git a/server/pkg/dataset/csvparser.go 
b/server/pkg/dataset/csvparser.go new file mode 100644 index 000000000..3d9bbcb7d --- /dev/null +++ b/server/pkg/dataset/csvparser.go @@ -0,0 +1,207 @@ +package dataset + +import ( + "encoding/csv" + "errors" + "io" + "strconv" + "strings" +) + +var ( + ErrFailedToParseCSVorTSVFile error = errors.New("failed to parse file content") + ErrIncompatibleSchema error = errors.New("schema is not compatible with csv") + ErrDuplicatiedNameFields error = errors.New("failed to parse, name-duplicated fields") +) + +type DatasetCSVParser struct { + reader *csv.Reader + firstline []string + headers []string + schema *Schema + name string +} + +func NewCSVParser(r io.Reader, n string, seperator rune) *DatasetCSVParser { + r2 := csv.NewReader(r) + r2.Comma = seperator + obj := &DatasetCSVParser{ + reader: r2, + name: n, + } + return obj +} + +func (p *DatasetCSVParser) Init() error { + headers, err := p.reader.Read() + if err != nil { + return ErrFailedToParseCSVorTSVFile + } + p.headers = headers + p.firstline, err = p.reader.Read() + if err != nil { + return ErrFailedToParseCSVorTSVFile + } + return nil +} +func (p *DatasetCSVParser) validateLine(line []string) bool { + return len(p.headers) == len(line) +} + +func (p *DatasetCSVParser) GuessSchema(sid SceneID) error { + if !p.validateLine(p.firstline) { + return ErrFailedToParseCSVorTSVFile + } + schemafields := []*SchemaField{} + haslat, haslng := false, false + for k, h := range p.headers { + if h == "lat" { + haslat = true + } + if h == "lng" { + haslng = true + } + if h != "lng" && h != "lat" && strings.TrimSpace(h) != "" { + t := ValueFromStringOrNumber(p.firstline[k]).Type() + field, _ := NewSchemaField().NewID().Name(h).Type(t).Build() + schemafields = append(schemafields, field) + } + } + if haslat && haslng { + field, _ := NewSchemaField().NewID().Name("location").Type(ValueTypeLatLng).Build() + schemafields = append(schemafields, field) + } + schema, err := NewSchema(). + NewID(). + Scene(sid). + Name(p.name). 
+ Source("file:///" + p.name). + Fields(schemafields). + Build() + if err != nil { + return err + } + p.schema = schema + return nil +} + +func (p *DatasetCSVParser) ReadAll() (*Schema, []*Dataset, error) { + if p.schema == nil { + return nil, nil, errors.New("schema is not generated yet") + } + var fields []*Field + schemafieldmap := make(map[string]FieldID) + for _, f := range p.schema.Fields() { + if _, ok := schemafieldmap[f.Name()]; !ok { + schemafieldmap[f.Name()] = f.ID() + } else { + return nil, nil, ErrDuplicatiedNameFields + } + } + datasets := []*Dataset{} + i := 0 + for { + var line []string + var err error + if i == 0 { + // process first line + line = p.firstline + } else { + line, err = p.reader.Read() + if err == io.EOF { + break + } + if err != nil { + return nil, nil, err + } + } + if !p.validateLine(line) { + return nil, nil, ErrFailedToParseCSVorTSVFile + } + + fields, err = p.getFields(line, schemafieldmap) + if err != nil { + return nil, nil, err + } + ds, err := New().NewID(). + Fields(fields). 
+ Scene(p.schema.Scene()).Schema(p.schema.ID()).Build() + if err != nil { + return nil, nil, err + } + datasets = append(datasets, ds) + i++ + } + + return p.schema, datasets, nil +} + +func (p *DatasetCSVParser) getFields(line []string, sfm map[string]FieldID) ([]*Field, error) { + fields := []*Field{} + var lat, lng *float64 + for i, record := range line { + value := ValueFromStringOrNumber(record) + if p.headers[i] == "lng" { + value, err := strconv.ParseFloat(record, 64) + if err != nil { + return nil, ErrFailedToParseCSVorTSVFile + } + lng = &value + } + if p.headers[i] == "lat" { + value, err := strconv.ParseFloat(record, 64) + if err != nil { + return nil, ErrFailedToParseCSVorTSVFile + } + lat = &value + } + + if p.headers[i] != "lat" && p.headers[i] != "lng" { + fields = append(fields, NewField(sfm[p.headers[i]], value, "")) + } + } + if lat != nil && lng != nil { + latlng := LatLng{Lat: *lat, Lng: *lng} + fields = append(fields, NewField(sfm["location"], ValueTypeLatLng.ValueFrom(latlng), "")) + } + return append([]*Field{}, fields...), nil +} + +func (p *DatasetCSVParser) CheckCompatible(s *Schema) error { + fieldsmap := make(map[string]*SchemaField) + for _, f := range s.Fields() { + fieldsmap[f.Name()] = f + } + haslat, haslng := false, false + for i, h := range p.headers { + if h != "lat" && h != "lng" { + if fieldsmap[h] == nil { + return ErrIncompatibleSchema + } + t := fieldsmap[h].Type() + v := ValueFromStringOrNumber(p.firstline[i]) + if v.Type() != t { + return ErrIncompatibleSchema + } + } + if h == "lat" { + haslat = true + } + if h == "lng" { + haslng = true + } + } + // check for location fields + if haslat && haslng { + if fieldsmap["location"] == nil { + return ErrIncompatibleSchema + } + } else { + if fieldsmap["location"] != nil { + return ErrIncompatibleSchema + } + } + + p.schema = s + return nil +} diff --git a/server/pkg/dataset/csvparser_test.go b/server/pkg/dataset/csvparser_test.go new file mode 100644 index 000000000..ea3f8e012 
--- /dev/null +++ b/server/pkg/dataset/csvparser_test.go @@ -0,0 +1,59 @@ +package dataset + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +const ( + csvmock = `hoge,foo,bar,lat,lng +1,foo,bar,12,15` +) + +func TestCSVParser(t *testing.T) { + r := strings.NewReader(csvmock) + p := NewCSVParser(r, "hoge.csv", ',') + err := p.Init() + assert.NoError(t, err) + sceneID := NewSceneID() + err = p.GuessSchema(sceneID) + assert.NoError(t, err) + + schema, datasets, err := p.ReadAll() + assert.NoError(t, err) + + assert.NotEmpty(t, schema) + assert.Equal(t, "hoge.csv", schema.Name()) + assert.Equal(t, "file:///hoge.csv", schema.Source()) + + assert.Equal(t, 1, len(datasets)) + + dsfm := make(map[string]interface{}) + for _, dsf := range datasets[0].Fields() { + dsfm[schema.Field(dsf.Field()).Name()] = dsf.Value().Interface() + } + assert.Equal(t, map[string]interface{}{ + "hoge": 1.0, + "foo": "foo", + "bar": "bar", + "location": LatLng{Lat: 12.0, Lng: 15.0}, + }, dsfm) +} + +func TestCSVParserCheckCompatible(t *testing.T) { + r := strings.NewReader(csvmock) + p := NewCSVParser(r, "hoge", ',') + err := p.Init() + assert.NoError(t, err) + f1 := NewSchemaField().NewID().Name("hoge").Type(ValueTypeNumber).MustBuild() + f2 := NewSchemaField().NewID().Name("foo").Type(ValueTypeString).MustBuild() + f3 := NewSchemaField().NewID().Name("bar").Type(ValueTypeString).MustBuild() + f4 := NewSchemaField().NewID().Name("location").Type(ValueTypeLatLng).MustBuild() + fields := []*SchemaField{f1, f2, f3, f4} + ds, err := NewSchema().NewID().Fields(fields).Build() + assert.NoError(t, err) + result := p.CheckCompatible(ds) + assert.NoError(t, result) +} diff --git a/server/pkg/dataset/dataset.go b/server/pkg/dataset/dataset.go new file mode 100644 index 000000000..271ade5ad --- /dev/null +++ b/server/pkg/dataset/dataset.go @@ -0,0 +1,127 @@ +package dataset + +type Dataset struct { + id ID + source string + schema SchemaID + fields map[FieldID]*Field + 
order []FieldID + scene SceneID +} + +func (d *Dataset) ID() (i ID) { + if d == nil { + return + } + return d.id +} + +func (d *Dataset) Scene() (i SceneID) { + if d == nil { + return + } + return d.scene +} + +func (d *Dataset) Source() string { + if d == nil { + return "" + } + return d.source +} + +func (d *Dataset) Schema() (i SchemaID) { + if d == nil { + return + } + return d.schema +} + +func (d *Dataset) Fields() []*Field { + if d == nil || d.order == nil { + return nil + } + fields := make([]*Field, 0, len(d.fields)) + for _, id := range d.order { + fields = append(fields, d.fields[id]) + } + return fields +} + +func (d *Dataset) Field(id FieldID) *Field { + if d == nil || d.fields == nil { + return nil + } + return d.fields[id] +} + +func (d *Dataset) FieldRef(id *FieldID) *Field { + if d == nil || id == nil { + return nil + } + return d.fields[*id] +} + +func (d *Dataset) NameField(ds *Schema) *Field { + if d == nil { + return nil + } + if d.Schema() != ds.ID() { + return nil + } + f := ds.RepresentativeField() + if f == nil { + return nil + } + return d.fields[f.ID()] +} + +func (d *Dataset) FieldBySource(source string) *Field { + if d == nil { + return nil + } + for _, f := range d.fields { + if f.source == source { + return f + } + } + return nil +} + +func (d *Dataset) FieldByType(t ValueType) *Field { + if d == nil { + return nil + } + for _, f := range d.fields { + if f.Type() == t { + return f + } + } + return nil +} + +// Interface returns a simple and human-readable representation of the dataset +func (d *Dataset) Interface(s *Schema) map[string]interface{} { + if d == nil || s == nil || d.Schema() != s.ID() { + return nil + } + m := map[string]interface{}{} + for _, f := range d.fields { + key := s.Field(f.Field()).Name() + m[key] = f.Value().Interface() + } + return m +} + +// Interface is almost same as Interface, but keys of the map are IDs of fields. 
+func (d *Dataset) InterfaceWithFieldIDs() map[string]interface{} { + if d == nil { + return nil + } + m := map[string]interface{}{} + for _, f := range d.fields { + key := f.Field().String() + m[key] = f.Value().Interface() + } + return m +} diff --git a/server/pkg/dataset/dataset_test.go b/server/pkg/dataset/dataset_test.go new file mode 100644 index 000000000..df3110ca7 --- /dev/null +++ b/server/pkg/dataset/dataset_test.go @@ -0,0 +1,88 @@ +package dataset + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDataset_Interface(t *testing.T) { + f1 := NewFieldID() + f2 := NewFieldID() + sid := NewSchemaID() + + tests := []struct { + name string + schema *Schema + dataset *Dataset + want map[string]interface{} + }{ + { + name: "ok", + schema: NewSchema().ID(sid).Scene(NewSceneID()).Fields([]*SchemaField{ + NewSchemaField().ID(f1).Name("foo").Type(ValueTypeNumber).MustBuild(), + NewSchemaField().ID(f2).Name("bar").Type(ValueTypeLatLng).MustBuild(), + }).MustBuild(), + dataset: New().NewID().Scene(NewSceneID()).Schema(sid).Fields([]*Field{ + NewField(f1, ValueTypeNumber.ValueFrom(1), ""), + NewField(f2, ValueTypeLatLng.ValueFrom(LatLng{Lat: 1, Lng: 2}), ""), + }).MustBuild(), + want: map[string]interface{}{ + "foo": float64(1), + "bar": LatLng{Lat: 1, Lng: 2}, + }, + }, + { + name: "empty", + dataset: &Dataset{}, + }, + { + name: "nil", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.dataset.Interface(tt.schema)) + }) + } +} + +func TestDataset_InterfaceWithFieldIDs(t *testing.T) { + f1 := NewFieldID() + f2 := NewFieldID() + + tests := []struct { + name string + dataset *Dataset + want map[string]interface{} + }{ + { + name: "ok", + dataset: New().NewID().Scene(NewSceneID()).Schema(NewSchemaID()).Fields([]*Field{ + NewField(f1, ValueTypeNumber.ValueFrom(1), ""), + NewField(f2, ValueTypeLatLng.ValueFrom(LatLng{Lat: 1, Lng: 2}), ""), + }).MustBuild(), + want: 
map[string]interface{}{ + f1.String(): float64(1), + f2.String(): LatLng{Lat: 1, Lng: 2}, + }, + }, + { + name: "empty", + dataset: &Dataset{}, + want: map[string]interface{}{}, + }, + { + name: "nil", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.dataset.InterfaceWithFieldIDs()) + }) + } +} diff --git a/server/pkg/dataset/diff.go b/server/pkg/dataset/diff.go new file mode 100644 index 000000000..c7b5722d2 --- /dev/null +++ b/server/pkg/dataset/diff.go @@ -0,0 +1,7 @@ +package dataset + +type Diff struct { + Added List + Removed List + Others map[ID]*Dataset +} diff --git a/server/pkg/dataset/field.go b/server/pkg/dataset/field.go new file mode 100644 index 000000000..bf6355c60 --- /dev/null +++ b/server/pkg/dataset/field.go @@ -0,0 +1,56 @@ +package dataset + +type Field struct { + field FieldID + value *Value + source string +} + +func NewField(field FieldID, value *Value, source string) *Field { + if value == nil { + return nil + } + return &Field{ + field: field, + value: value, + } +} + +func (d *Field) Field() (i FieldID) { + if d == nil { + return + } + return d.field +} + +func (d *Field) FieldRef() *FieldID { + if d == nil { + return nil + } + return d.field.Ref() +} + +func (d *Field) IsEmpty() bool { + return d == nil || d.field.IsNil() || d.value == nil +} + +func (d *Field) Value() *Value { + if d == nil { + return nil + } + return d.value.Clone() +} + +func (d *Field) Type() ValueType { + if d == nil { + return ValueTypeUnknown + } + return d.value.Type() +} + +func (d *Field) Source() string { + if d == nil { + return "" + } + return d.source +} diff --git a/server/pkg/dataset/graph_iterator.go b/server/pkg/dataset/graph_iterator.go new file mode 100644 index 000000000..17e357e18 --- /dev/null +++ b/server/pkg/dataset/graph_iterator.go @@ -0,0 +1,69 @@ +package dataset + +// GraphIterator is a iterator for graphically exploring a dataset. 
+type GraphIterator struct { + m Map + ids [][]ID + currentIndex int + currentDepthIndex int + maxDepth int +} + +func GraphIteratorFrom(root ID, depth int) *GraphIterator { + return &GraphIterator{ + ids: [][]ID{{root}}, + maxDepth: depth, + } +} + +func (di *GraphIterator) Next(d *Dataset) (ID, bool) { + if di == nil || di.maxDepth == 0 || len(di.ids) == 0 || d == nil { + return ID{}, false + } + if di.currentDepthIndex >= len(di.ids) { + return ID{}, true + } + + if di.m == nil { + di.m = Map{} + } + di.m[d.ID()] = d + + // add fields + if len(di.ids) <= di.currentDepthIndex+1 { + di.ids = append(di.ids, []ID{}) + } + nextDepthIDs := di.ids[di.currentDepthIndex+1] + currentIDs := di.ids[di.currentDepthIndex] + for _, f := range d.Fields() { + if r := f.Value().ValueRef(); r != nil { + if rid, err := IDFrom(*r); err == nil { + nextDepthIDs = append(nextDepthIDs, rid) + } + } + } + di.ids[di.currentDepthIndex+1] = nextDepthIDs + + // next + if di.currentIndex == len(currentIDs)-1 { + di.currentIndex = 0 + // next depth + if di.maxDepth <= di.currentDepthIndex || len(nextDepthIDs) == 0 { + // done + di.currentDepthIndex++ + return ID{}, true + } + di.currentDepthIndex++ + } else { + di.currentIndex++ + } + + return di.ids[di.currentDepthIndex][di.currentIndex], false +} + +func (di *GraphIterator) Result() Map { + if di == nil { + return nil + } + return di.m +} diff --git a/server/pkg/dataset/graph_iterator_test.go b/server/pkg/dataset/graph_iterator_test.go new file mode 100644 index 000000000..65d880fba --- /dev/null +++ b/server/pkg/dataset/graph_iterator_test.go @@ -0,0 +1,63 @@ +package dataset + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDatasetGraphIterator(t *testing.T) { + sid := NewSceneID() + dsid := NewSchemaID() + + d0id := NewID() + d11id := NewID() + d12id := NewID() + d21id := NewID() + d31id := NewID() + d32id := NewID() + + d0, _ := New().ID(d0id).Schema(dsid).Scene(sid).Fields([]*Field{ + NewField(NewFieldID(), 
ValueTypeRef.ValueFrom(d11id), ""), + NewField(NewFieldID(), ValueTypeRef.ValueFrom(d12id), ""), + }).Build() + d11, _ := New().ID(d11id).Schema(dsid).Scene(sid).Fields([]*Field{ + NewField(NewFieldID(), ValueTypeRef.ValueFrom(d21id), ""), + }).Build() + d12, _ := New().ID(d12id).Schema(dsid).Scene(sid).Fields([]*Field{ + NewField(NewFieldID(), ValueTypeString.ValueFrom("hoge"), ""), + }).Build() + d21, _ := New().ID(d21id).Schema(dsid).Scene(sid).Fields([]*Field{ + NewField(NewFieldID(), ValueTypeRef.ValueFrom(d31id), ""), + NewField(NewFieldID(), ValueTypeRef.ValueFrom(d32id), ""), + }).Build() + d31, _ := New().ID(d31id).Schema(dsid).Scene(sid).Fields([]*Field{ + NewField(NewFieldID(), ValueTypeString.ValueFrom("foo"), ""), + }).Build() + d32, _ := New().ID(d32id).Schema(dsid).Scene(sid).Fields([]*Field{ + NewField(NewFieldID(), ValueTypeString.ValueFrom("bar"), ""), + }).Build() + + it := GraphIteratorFrom(d0id, 3) + testTestDatasetGraphIteratorNext( + t, it, []*Dataset{d0, d11, d12, d21, d31, d32}, + ) + it = GraphIteratorFrom(d0id, 2) + testTestDatasetGraphIteratorNext( + t, it, []*Dataset{d0, d11, d12, d21}, + ) +} + +func testTestDatasetGraphIteratorNext(t *testing.T, it *GraphIterator, ds List) { + t.Helper() + for i, d := range ds { + next, done := it.Next(d) + if i == len(ds)-1 { + assert.Equal(t, true, done) + } else { + assert.False(t, done, "next done %d", i) + assert.Equal(t, ds[i+1].ID(), next, "next %d", i) + } + } + assert.Equal(t, ds.Map(), it.Result()) +} diff --git a/server/pkg/dataset/graph_loader.go b/server/pkg/dataset/graph_loader.go new file mode 100644 index 000000000..623280e9b --- /dev/null +++ b/server/pkg/dataset/graph_loader.go @@ -0,0 +1,35 @@ +package dataset + +import ( + "context" +) + +type GraphLoader func(context.Context, ID, ...FieldID) (List, *Field, error) + +func GraphLoaderFromMap(m Map) GraphLoader { + return func(ctx context.Context, root ID, fields ...FieldID) (List, *Field, error) { + list, field := 
m.GraphSearchByFields(root, fields...) + return list, field, nil + } +} + +func GraphLoaderFromMapAndGraph(m Map, g GraphLoader) GraphLoader { + return func(ctx context.Context, root ID, fields ...FieldID) (List, *Field, error) { + if m != nil { + if len(fields) == 0 { + return List{m[root]}, nil, nil + } + if len(fields) == 1 { + ds := m[root] + return List{ds}, ds.Field(fields[0]), nil + } + list, field := m.GraphSearchByFields(root, fields...) + if list != nil && field != nil { + return list, field, nil + } + } + + // it needs looking up dataset graph + return g(ctx, root, fields...) + } +} diff --git a/server/pkg/dataset/id.go b/server/pkg/dataset/id.go new file mode 100644 index 000000000..47e93d428 --- /dev/null +++ b/server/pkg/dataset/id.go @@ -0,0 +1,40 @@ +package dataset + +import "github.com/reearth/reearth-backend/pkg/id" + +type ID = id.DatasetID +type FieldID = id.DatasetFieldID +type SchemaID = id.DatasetSchemaID +type SceneID = id.SceneID + +var NewID = id.NewDatasetID +var NewSchemaID = id.NewDatasetSchemaID +var NewFieldID = id.NewDatasetFieldID +var NewSceneID = id.NewSceneID + +var MustID = id.MustDatasetID +var MustSchemaID = id.MustDatasetSchemaID +var MustFieldID = id.MustDatasetFieldID +var MustSceneID = id.MustSceneID + +var IDFrom = id.DatasetIDFrom +var SchemaIDFrom = id.DatasetSchemaIDFrom +var FieldIDFrom = id.DatasetFieldIDFrom +var SceneIDFrom = id.SceneIDFrom + +var IDFromRef = id.DatasetIDFromRef +var SchemaIDFromRef = id.DatasetSchemaIDFromRef +var FieldIDFromRef = id.DatasetFieldIDFromRef +var SceneIDFromRef = id.SceneIDFromRef + +type IDSet = id.DatasetIDSet +type SchemaIDSet = id.DatasetSchemaIDSet +type FieldIDSet = id.DatasetFieldIDSet +type SceneIDSet = id.SceneIDSet + +var NewIDSet = id.NewDatasetIDSet +var NewSchemaIDset = id.NewDatasetSchemaIDSet +var NewFieldIDset = id.NewDatasetFieldIDSet +var NewSceneIDset = id.NewSceneIDSet + +var ErrInvalidID = id.ErrInvalidID diff --git a/server/pkg/dataset/list.go 
b/server/pkg/dataset/list.go new file mode 100644 index 000000000..7675c6a38 --- /dev/null +++ b/server/pkg/dataset/list.go @@ -0,0 +1,182 @@ +package dataset + +type List []*Dataset + +func (l List) First() *Dataset { + if l == nil || len(l) == 0 { + return nil + } + return l[0] +} + +func (l List) Last() *Dataset { + if l == nil || len(l) == 0 { + return nil + } + return l[len(l)-1] +} + +func (l List) FindDataset(id ID) *Dataset { + for _, t := range l { + if t.ID() == id { + return t + } + } + return nil +} + +func (l List) ToDatasetIds() []ID { + if l == nil { + return nil + } + + ids := []ID{} + for _, t := range l { + ids = append(ids, t.ID()) + } + return ids +} + +func (l List) FindDatasetBySource(s string) *Dataset { + for _, t := range l { + if t.Source() == s { + return t + } + } + return nil +} + +func (l List) FilterByDatasetSchema(s SchemaID) List { + n := List{} + for _, t := range l { + if t.Schema() == s { + n = append(n, t) + } + } + return n +} + +func (l List) DiffBySource(l2 List) Diff { + // l is old, l2 is new + added := []*Dataset{} + removed := []*Dataset{} + // others := map[string]DatasetDiffTouple{} + others2 := map[ID]*Dataset{} + + s1 := map[string]*Dataset{} + for _, d1 := range l { + s1[d1.Source()] = d1 + } + + for _, d2 := range l2 { + if d1, ok := s1[d2.Source()]; ok { + // others + // others[d2.Source()] = DatasetDiffTouple{Old: d1, New: d2} + others2[d1.ID()] = d2 + } else { + // added + added = append(added, d2) + } + } + + for _, d1 := range l { + if _, ok := others2[d1.ID()]; !ok { + // removed + removed = append(removed, d1) + } + } + + return Diff{ + Added: added, + Removed: removed, + Others: others2, + // Others: others, + } +} + +func (l List) Map() Map { + if l == nil { + return nil + } + m := Map{} + for _, d := range l { + if d != nil { + m[d.ID()] = d + } + } + return m +} + +func (l List) Loader() Loader { + return LoaderFrom(l) +} + +func (l List) GraphLoader() GraphLoader { + return GraphLoaderFromMap(l.Map()) +} 
+ +type Map map[ID]*Dataset + +func (dm Map) Add(dss ...*Dataset) { + if dss == nil { + return + } + if dm == nil { + dm = map[ID]*Dataset{} + } + for _, ds := range dss { + if ds == nil { + continue + } + dm[ds.ID()] = ds + } +} + +func (dm Map) Slice() List { + if dm == nil { + return nil + } + res := make(List, 0, len(dm)) + for _, d := range dm { + res = append(res, d) + } + return res +} + +func (dm Map) GraphSearchByFields(root ID, fields ...FieldID) (List, *Field) { + res := make(List, 0, len(fields)) + currentD := dm[root] + if currentD == nil { + return res, nil + } + for i, f := range fields { + if currentD == nil { + return res, nil + } + res = append(res, currentD) + field := currentD.Field(f) + if field == nil { + return res, nil + } + if len(fields)-1 == i { + return res, field + } else if fids := field.Value().ValueRef(); fids != nil { + if fid, err := IDFrom(*fids); err == nil { + currentD = dm[ID(fid)] + } else { + return res, nil + } + } else { + return res, nil + } + } + return res, nil +} + +func (dm Map) Loader() Loader { + return LoaderFromMap(dm) +} + +func (dm Map) GraphLoader() GraphLoader { + return GraphLoaderFromMap(dm) +} diff --git a/server/pkg/dataset/list_test.go b/server/pkg/dataset/list_test.go new file mode 100644 index 000000000..dde511008 --- /dev/null +++ b/server/pkg/dataset/list_test.go @@ -0,0 +1,76 @@ +package dataset + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDatasetListDiff(t *testing.T) { + sid := NewSceneID() + source1 := "hogehoge/1" + source2 := "hogehoge/2" + source3 := "hogehoge/3" + d1, _ := New().NewID().Scene(sid).Source(source1).Build() + d2, _ := New().NewID().Scene(sid).Source(source2).Build() + d3, _ := New().NewID().Scene(sid).Source(source2).Build() + d4, _ := New().NewID().Scene(sid).Source(source3).Build() + d5, _ := New().NewID().Scene(sid).Source(source2).Build() // duplicated source + + l1 := List{d1, d2} + l2 := List{d3, d4} + diff := l1.DiffBySource(l2) + expected 
:= Diff{ + Added: []*Dataset{d4}, + Removed: []*Dataset{d1}, + Others: map[ID]*Dataset{ + d2.ID(): d3, + }, + } + assert.Equal(t, expected, diff) + + l1 = List{d1, d2, d5} + l2 = List{d3, d4} + diff = l1.DiffBySource(l2) + expected = Diff{ + Added: []*Dataset{d4}, + Removed: []*Dataset{d1, d2}, + Others: map[ID]*Dataset{ + d5.ID(): d3, + }, + } + assert.Equal(t, expected, diff) +} + +func TestDatasetMapGraphSearchByFields(t *testing.T) { + did1 := NewID() + did2 := NewID() + did3 := NewID() + fid1 := NewFieldID() + fid2 := NewFieldID() + fid3 := NewFieldID() + sid := NewSceneID() + v1 := ValueTypeRef.ValueFrom(did2) + v2 := ValueTypeRef.ValueFrom(did3) + v3 := ValueTypeString.ValueFrom("value") + f3 := NewField(fid3, v3, "") + d1, _ := New().ID(did1).Scene(sid).Fields([]*Field{ + NewField(fid1, v1, ""), + }).Build() + d2, _ := New().ID(did2).Scene(sid).Fields([]*Field{ + NewField(fid2, v2, ""), + }).Build() + d3, _ := New().ID(did3).Scene(sid).Fields([]*Field{ + f3, + }).Build() + + m := List{d1, d2, d3}.Map() + + res, resf := m.GraphSearchByFields(did1, fid1, fid2, fid3) + assert.Equal(t, List{d1, d2, d3}, res) + assert.Equal(t, f3, resf) + + res2, resf2 := m.GraphSearchByFields(did1, fid1, fid3, fid2) + assert.Equal(t, List{d1, d2}, res2) + assert.Nil(t, resf2) +} diff --git a/server/pkg/dataset/loader.go b/server/pkg/dataset/loader.go new file mode 100644 index 000000000..4e20a0458 --- /dev/null +++ b/server/pkg/dataset/loader.go @@ -0,0 +1,41 @@ +package dataset + +import ( + "context" +) + +type Loader func(context.Context, ...ID) (List, error) + +func LoaderFrom(data []*Dataset) Loader { + return func(ctx context.Context, ids ...ID) (List, error) { + res := make(List, 0, len(ids)) + for _, i := range ids { + found := false + for _, d := range data { + if i == d.ID() { + res = append(res, d) + found = true + break + } + } + if !found { + res = append(res, nil) + } + } + return res, nil + } +} + +func LoaderFromMap(data map[ID]*Dataset) Loader { + return 
func(ctx context.Context, ids ...ID) (List, error) { + res := make(List, 0, len(ids)) + for _, i := range ids { + if d, ok := data[i]; ok { + res = append(res, d) + } else { + res = append(res, nil) + } + } + return res, nil + } +} diff --git a/server/pkg/dataset/schema.go b/server/pkg/dataset/schema.go new file mode 100644 index 000000000..b6d01b1fa --- /dev/null +++ b/server/pkg/dataset/schema.go @@ -0,0 +1,118 @@ +package dataset + +type Schema struct { + id SchemaID + source string + name string + fields map[FieldID]*SchemaField + order []FieldID + representativeField *FieldID + scene SceneID + dynamic bool +} + +func (d *Schema) ID() (i SchemaID) { + if d == nil { + return + } + return d.id +} + +func (d *Schema) IDRef() *SchemaID { + if d == nil { + return nil + } + return d.id.Ref() +} + +func (d *Schema) Scene() (i SceneID) { + if d == nil { + return + } + return d.scene +} + +func (d *Schema) Source() (s string) { + if d == nil { + return + } + return d.source +} + +func (d *Schema) Name() string { + if d == nil { + return "" + } + return d.name +} + +func (d *Schema) RepresentativeFieldID() *FieldID { + if d == nil { + return nil + } + return d.representativeField +} + +func (d *Schema) RepresentativeField() *SchemaField { + if d == nil || d.representativeField == nil { + return nil + } + return d.fields[*d.representativeField] +} + +func (d *Schema) Fields() []*SchemaField { + if d == nil || d.order == nil { + return nil + } + fields := make([]*SchemaField, 0, len(d.fields)) + for _, id := range d.order { + fields = append(fields, d.fields[id]) + } + return fields +} + +func (d *Schema) Field(id FieldID) *SchemaField { + if d == nil { + return nil + } + return d.fields[id] +} + +func (d *Schema) FieldRef(id *FieldID) *SchemaField { + if d == nil || id == nil { + return nil + } + return d.fields[*id] +} + +func (d *Schema) FieldBySource(source string) *SchemaField { + if d == nil { + return nil + } + for _, f := range d.fields { + if f.source == source { 
+ return f + } + } + return nil +} + +func (d *Schema) FieldByType(t ValueType) *SchemaField { + if d == nil { + return nil + } + for _, f := range d.fields { + if f.Type() == t { + return f + } + } + return nil +} + +func (d *Schema) Dynamic() bool { + return d.dynamic +} + +func (u *Schema) Rename(name string) { + u.name = name +} diff --git a/server/pkg/dataset/schema_builder.go b/server/pkg/dataset/schema_builder.go new file mode 100644 index 000000000..b1da43667 --- /dev/null +++ b/server/pkg/dataset/schema_builder.go @@ -0,0 +1,87 @@ +package dataset + +type SchemaBuilder struct { + d *Schema +} + +func NewSchema() *SchemaBuilder { + return &SchemaBuilder{d: &Schema{}} +} + +func (b *SchemaBuilder) Build() (*Schema, error) { + if b.d.id.IsNil() { + return nil, ErrInvalidID + } + if b.d.fields == nil || b.d.order == nil { + b.d.fields = map[FieldID]*SchemaField{} + b.d.order = []FieldID{} + } + return b.d, nil +} + +func (b *SchemaBuilder) MustBuild() *Schema { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +func (b *SchemaBuilder) ID(id SchemaID) *SchemaBuilder { + b.d.id = id + return b +} + +func (b *SchemaBuilder) NewID() *SchemaBuilder { + b.d.id = NewSchemaID() + return b +} + +func (b *SchemaBuilder) Scene(scene SceneID) *SchemaBuilder { + b.d.scene = scene + return b +} + +func (b *SchemaBuilder) Name(name string) *SchemaBuilder { + b.d.name = name + return b +} + +func (b *SchemaBuilder) Dynamic(dynamic bool) *SchemaBuilder { + b.d.dynamic = dynamic + return b +} + +func (b *SchemaBuilder) Source(source string) *SchemaBuilder { + b.d.source = source + return b +} + +func (b *SchemaBuilder) RepresentativeField(representativeField FieldID) *SchemaBuilder { + rf := representativeField + b.d.representativeField = &rf + return b +} + +func (b *SchemaBuilder) Fields(fields []*SchemaField) *SchemaBuilder { + b.d.fields = map[FieldID]*SchemaField{} + b.d.order = make([]FieldID, 0, len(fields)) + sources := map[string]struct{}{} + + 
for _, f := range fields { + if f == nil { + continue + } + + if source := f.Source(); source == "" { + b.d.fields[f.ID()] = f.Clone() + b.d.order = append(b.d.order, f.ID()) + } else if _, ok := sources[source]; !ok { + b.d.fields[f.ID()] = f.Clone() + b.d.order = append(b.d.order, f.ID()) + sources[source] = struct{}{} + } + } + + return b +} diff --git a/server/pkg/dataset/schema_field.go b/server/pkg/dataset/schema_field.go new file mode 100644 index 000000000..cc7478075 --- /dev/null +++ b/server/pkg/dataset/schema_field.go @@ -0,0 +1,64 @@ +package dataset + +type SchemaField struct { + id FieldID + name string + dataType ValueType + source string + ref *SchemaID +} + +func (d *SchemaField) ID() (i FieldID) { + if d == nil { + return + } + return d.id +} + +func (d *SchemaField) IDRef() *FieldID { + if d == nil { + return nil + } + return d.id.Ref() +} + +func (d *SchemaField) Name() (n string) { + if d == nil { + return + } + return d.name +} + +func (d *SchemaField) Ref() *SchemaID { + if d == nil { + return nil + } + return d.ref +} + +func (d *SchemaField) Type() (v ValueType) { + if d == nil { + return + } + return d.dataType +} + +func (d *SchemaField) Source() (s string) { + if d == nil { + return + } + return d.source +} + +func (d *SchemaField) Clone() *SchemaField { + if d == nil { + return nil + } + return &SchemaField{ + id: d.id, + name: d.name, + dataType: d.dataType, + source: d.source, + ref: d.ref.CopyRef(), + } +} diff --git a/server/pkg/dataset/schema_field_builder.go b/server/pkg/dataset/schema_field_builder.go new file mode 100644 index 000000000..d971401b4 --- /dev/null +++ b/server/pkg/dataset/schema_field_builder.go @@ -0,0 +1,61 @@ +package dataset + +import ( + "errors" +) + +type SchemaFieldBuilder struct { + d *SchemaField +} + +func NewSchemaField() *SchemaFieldBuilder { + return &SchemaFieldBuilder{d: &SchemaField{}} +} + +func (b *SchemaFieldBuilder) Build() (*SchemaField, error) { + if b.d.id.IsNil() { + return nil, 
ErrInvalidID + } + if !b.d.dataType.Default() { + return nil, errors.New("invalid value type") + } + return b.d, nil +} + +func (b *SchemaFieldBuilder) MustBuild() *SchemaField { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +func (b *SchemaFieldBuilder) ID(id FieldID) *SchemaFieldBuilder { + b.d.id = id + return b +} + +func (b *SchemaFieldBuilder) NewID() *SchemaFieldBuilder { + b.d.id = NewFieldID() + return b +} + +func (b *SchemaFieldBuilder) Name(name string) *SchemaFieldBuilder { + b.d.name = name + return b +} + +func (b *SchemaFieldBuilder) Type(dataType ValueType) *SchemaFieldBuilder { + b.d.dataType = dataType + return b +} + +func (b *SchemaFieldBuilder) Source(source string) *SchemaFieldBuilder { + b.d.source = source + return b +} + +func (b *SchemaFieldBuilder) Ref(ref *SchemaID) *SchemaFieldBuilder { + b.d.ref = ref.CopyRef() + return b +} diff --git a/server/pkg/dataset/schema_field_diff.go b/server/pkg/dataset/schema_field_diff.go new file mode 100644 index 000000000..1bcace8ae --- /dev/null +++ b/server/pkg/dataset/schema_field_diff.go @@ -0,0 +1,41 @@ +package dataset + +type SchemaFieldDiff struct { + Added []*SchemaField + Removed []*SchemaField + Replaced map[FieldID]*SchemaField +} + +func (d *Schema) FieldDiffBySource(d2 *Schema) SchemaFieldDiff { + added := []*SchemaField{} + removed := []*SchemaField{} + // others := map[string]DatasetDiffTouple{} + others2 := map[FieldID]*SchemaField{} + + s1 := map[string]*SchemaField{} + for _, d1 := range d.fields { + s1[d1.Source()] = d1 + } + + for _, d2 := range d2.fields { + if d1, ok := s1[d2.Source()]; ok { + others2[d1.ID()] = d2 + } else { + // added + added = append(added, d2) + } + } + + for _, d1 := range d.fields { + if _, ok := others2[d1.ID()]; !ok { + // removed + removed = append(removed, d1) + } + } + + return SchemaFieldDiff{ + Added: added, + Removed: removed, + Replaced: others2, + } +} diff --git a/server/pkg/dataset/schema_graph_iterator.go 
b/server/pkg/dataset/schema_graph_iterator.go new file mode 100644 index 000000000..59b4a7652 --- /dev/null +++ b/server/pkg/dataset/schema_graph_iterator.go @@ -0,0 +1,67 @@ +package dataset + +// SchemaGraphIterator ใฏใ€ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚’ใ‚ฐใƒฉใƒ•ๆŽข็ดขใ™ใ‚‹ใŸใ‚ใฎใ‚คใƒ†ใƒฌใƒผใ‚ฟใงใ™ใ€‚ +type SchemaGraphIterator struct { + m SchemaMap + ids [][]SchemaID + currentIndex int + currentDepthIndex int + maxDepth int +} + +func SchemaGraphIteratorFrom(root SchemaID, depth int) *SchemaGraphIterator { + return &SchemaGraphIterator{ + ids: [][]SchemaID{{root}}, + maxDepth: depth, + } +} + +func (di *SchemaGraphIterator) Next(d *Schema) (SchemaID, bool) { + if di == nil || di.maxDepth == 0 || di.ids == nil || len(di.ids) == 0 || d == nil { + return SchemaID{}, false + } + if di.currentDepthIndex >= len(di.ids) { + return SchemaID{}, true + } + + if di.m == nil { + di.m = SchemaMap{} + } + di.m[d.ID()] = d + + // add fields + if len(di.ids) <= di.currentDepthIndex+1 { + di.ids = append(di.ids, []SchemaID{}) + } + nextDepthIDs := di.ids[di.currentDepthIndex+1] + currentIDs := di.ids[di.currentDepthIndex] + for _, f := range d.Fields() { + if r := f.Ref(); r != nil { + nextDepthIDs = append(nextDepthIDs, *r) + } + } + di.ids[di.currentDepthIndex+1] = nextDepthIDs + + // next + if di.currentIndex == len(currentIDs)-1 { + di.currentIndex = 0 + // next depth + if di.maxDepth <= di.currentDepthIndex || len(nextDepthIDs) == 0 { + // done + di.currentDepthIndex++ + return SchemaID{}, true + } + di.currentDepthIndex++ + } else { + di.currentIndex++ + } + + return di.ids[di.currentDepthIndex][di.currentIndex], false +} + +func (di *SchemaGraphIterator) Result() SchemaMap { + if di == nil { + return nil + } + return di.m +} diff --git a/server/pkg/dataset/schema_graph_iterator_test.go b/server/pkg/dataset/schema_graph_iterator_test.go new file mode 100644 index 000000000..c7f556bca --- /dev/null +++ b/server/pkg/dataset/schema_graph_iterator_test.go @@ -0,0 +1,68 @@ +package 
dataset + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDatasetSchemaGraphIterator(t *testing.T) { + sid := NewSceneID() + d0id := NewSchemaID() + d11id := NewSchemaID() + d12id := NewSchemaID() + d21id := NewSchemaID() + d31id := NewSchemaID() + d32id := NewSchemaID() + + d0f0, _ := NewSchemaField().ID(NewFieldID()).Type(ValueTypeRef).Ref(&d11id).Build() + d0f1, _ := NewSchemaField().ID(NewFieldID()).Type(ValueTypeRef).Ref(&d12id).Build() + d11f0, _ := NewSchemaField().ID(NewFieldID()).Type(ValueTypeString).Build() + d12f0, _ := NewSchemaField().ID(NewFieldID()).Type(ValueTypeRef).Ref(&d21id).Build() + d21f0, _ := NewSchemaField().ID(NewFieldID()).Type(ValueTypeRef).Ref(&d31id).Build() + d21f1, _ := NewSchemaField().ID(NewFieldID()).Type(ValueTypeRef).Ref(&d32id).Build() + d31f0, _ := NewSchemaField().ID(NewFieldID()).Type(ValueTypeString).Build() + d32f0, _ := NewSchemaField().ID(NewFieldID()).Type(ValueTypeString).Build() + + d0, _ := NewSchema().ID(d0id).Scene(sid).Fields([]*SchemaField{ + d0f0, d0f1, + }).Build() + d11, _ := NewSchema().ID(d11id).Scene(sid).Fields([]*SchemaField{ + d11f0, + }).Build() + d12, _ := NewSchema().ID(d12id).Scene(sid).Fields([]*SchemaField{ + d12f0, + }).Build() + d21, _ := NewSchema().ID(d21id).Scene(sid).Fields([]*SchemaField{ + d21f0, + d21f1, + }).Build() + d31, _ := NewSchema().ID(d31id).Scene(sid).Fields([]*SchemaField{ + d31f0, + }).Build() + d32, _ := NewSchema().ID(d32id).Scene(sid).Fields([]*SchemaField{ + d32f0, + }).Build() + + it := SchemaGraphIteratorFrom(d0id, 3) + testTestDatasetSchemaGraphIteratorNext( + t, it, []*Schema{d0, d11, d12, d21, d31, d32}, + ) + it = SchemaGraphIteratorFrom(d0id, 2) + testTestDatasetSchemaGraphIteratorNext( + t, it, []*Schema{d0, d11, d12, d21}, + ) +} + +func testTestDatasetSchemaGraphIteratorNext(t *testing.T, it *SchemaGraphIterator, ds SchemaList) { + for i, d := range ds { + next, done := it.Next(d) + if i == len(ds)-1 { + assert.Equal(t, true, done) + 
} else { + assert.Equal(t, ds[i+1].ID(), next, "next %d", i) + assert.Equal(t, false, done, "next done %d", i) + } + } + assert.Equal(t, ds.Map(), it.Result()) +} diff --git a/server/pkg/dataset/schema_list.go b/server/pkg/dataset/schema_list.go new file mode 100644 index 000000000..f9041298d --- /dev/null +++ b/server/pkg/dataset/schema_list.go @@ -0,0 +1,57 @@ +package dataset + +type SchemaList []*Schema + +func (dsl SchemaList) Map() SchemaMap { + if dsl == nil { + return nil + } + m := SchemaMap{} + for _, d := range dsl { + if d != nil { + m[d.ID()] = d + } + } + return m +} + +type SchemaMap map[SchemaID]*Schema + +func (dsm SchemaMap) Slice() SchemaList { + if dsm == nil { + return nil + } + res := make(SchemaList, 0, len(dsm)) + for _, ds := range dsm { + if ds != nil { + res = append(res, ds) + } + } + return res +} + +func (dsm SchemaMap) GraphSearchByFields(root SchemaID, fields ...FieldID) (SchemaList, *SchemaField) { + res := make(SchemaList, 0, len(fields)) + currentDs := dsm[root] + if currentDs == nil { + return res, nil + } + for i, f := range fields { + if currentDs == nil { + return res, nil + } + res = append(res, currentDs) + field := currentDs.Field(f) + if field == nil { + return res, nil + } + if len(fields)-1 == i { + return res, field + } else if r := field.Ref(); r != nil { + currentDs = dsm[*r] + } else { + return res, nil + } + } + return res, nil +} diff --git a/server/pkg/dataset/schema_list_test.go b/server/pkg/dataset/schema_list_test.go new file mode 100644 index 000000000..267c4b32a --- /dev/null +++ b/server/pkg/dataset/schema_list_test.go @@ -0,0 +1,39 @@ +package dataset + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDatasetSchemaMapGraphSearchByFields(t *testing.T) { + did1 := NewSchemaID() + did2 := NewSchemaID() + did3 := NewSchemaID() + fid1 := NewFieldID() + fid2 := NewFieldID() + fid3 := NewFieldID() + sid := NewSceneID() + f1, _ := 
NewSchemaField().ID(fid1).Type(ValueTypeString).Ref(&did2).Build() + f2, _ := NewSchemaField().ID(fid2).Type(ValueTypeString).Ref(&did3).Build() + f3, _ := NewSchemaField().ID(fid3).Type(ValueTypeString).Build() + d1, _ := NewSchema().ID(did1).Scene(sid).Fields([]*SchemaField{ + f1, + }).Build() + d2, _ := NewSchema().ID(did2).Scene(sid).Fields([]*SchemaField{ + f2, + }).Build() + d3, _ := NewSchema().ID(did3).Scene(sid).Fields([]*SchemaField{ + f3, + }).Build() + + m := SchemaList{d1, d2, d3}.Map() + + res, resf := m.GraphSearchByFields(did1, fid1, fid2, fid3) + assert.Equal(t, SchemaList{d1, d2, d3}, res) + assert.Equal(t, f3, resf) + + res2, resf2 := m.GraphSearchByFields(did1, fid1, fid3, fid2) + assert.Equal(t, SchemaList{d1, d2}, res2) + assert.Nil(t, resf2) +} diff --git a/server/pkg/dataset/value.go b/server/pkg/dataset/value.go new file mode 100644 index 000000000..389ad34a6 --- /dev/null +++ b/server/pkg/dataset/value.go @@ -0,0 +1,233 @@ +package dataset + +import ( + "net/url" + + "github.com/reearth/reearth-backend/pkg/value" +) + +type LatLng = value.LatLng +type LatLngHeight = value.LatLngHeight +type Coordinates = value.Coordinates +type Rect = value.Rect +type Polygon = value.Polygon + +type ValueType value.Type + +var ( + ValueTypeUnknown = ValueType(value.TypeUnknown) + ValueTypeBool = ValueType(value.TypeBool) + ValueTypeNumber = ValueType(value.TypeNumber) + ValueTypeString = ValueType(value.TypeString) + ValueTypeRef = ValueType(value.TypeRef) + ValueTypeURL = ValueType(value.TypeURL) + ValueTypeLatLng = ValueType(value.TypeLatLng) + ValueTypeLatLngHeight = ValueType(value.TypeLatLngHeight) + ValueTypeCoordinates = ValueType(value.TypeCoordinates) + ValueTypeRect = ValueType(value.TypeRect) + TypePolygon = ValueType(value.TypePolygon) +) + +func (vt ValueType) Valid() bool { + return value.Type(vt).Default() +} + +func (t ValueType) Default() bool { + return value.Type(t).Default() +} + +func (t ValueType) ValueFrom(i interface{}) *Value { + 
vv := value.Type(t).ValueFrom(i, nil) + if vv == nil { + return nil + } + return &Value{v: *vv} +} + +func (vt ValueType) MustBeValue(i interface{}) *Value { + if v := vt.ValueFrom(i); v != nil { + return v + } + panic("invalid value") +} + +func (vt ValueType) None() *OptionalValue { + return NewOptionalValue(vt, nil) +} + +type Value struct { + v value.Value +} + +func (v *Value) IsEmpty() bool { + return v == nil || v.v.IsEmpty() +} + +func (v *Value) Clone() *Value { + if v == nil { + return nil + } + vv := v.v.Clone() + if vv == nil { + return nil + } + return &Value{v: *vv} +} + +func (v *Value) Some() *OptionalValue { + return OptionalValueFrom(v) +} + +func (v *Value) Type() ValueType { + if v == nil { + return ValueTypeUnknown + } + return ValueType(v.v.Type()) +} + +func (v *Value) Value() interface{} { + if v == nil { + return nil + } + return v.v.Value() +} + +func (v *Value) Interface() interface{} { + if v == nil { + return nil + } + return v.v.Interface() +} + +func (v *Value) Cast(vt ValueType) *Value { + if v == nil { + return nil + } + nv := v.v.Cast(value.Type(vt), nil) + if nv == nil { + return nil + } + return &Value{v: *nv} +} + +func (v *Value) ValueBool() *bool { + if v == nil { + return nil + } + vv, ok := v.v.ValueBool() + if ok { + return &vv + } + return nil +} + +func (v *Value) ValueNumber() *float64 { + if v == nil { + return nil + } + vv, ok := v.v.ValueNumber() + if ok { + return &vv + } + return nil +} + +func (v *Value) ValueString() *string { + if v == nil { + return nil + } + vv, ok := v.v.ValueString() + if ok { + return &vv + } + return nil +} + +func (v *Value) ValueRef() *string { + if v == nil { + return nil + } + vv, ok := v.v.ValueRef() + if ok { + return &vv + } + return nil +} + +func (v *Value) ValueURL() *url.URL { + if v == nil { + return nil + } + vv, ok := v.v.ValueURL() + if ok { + return vv + } + return nil +} + +func (v *Value) ValueLatLng() *LatLng { + if v == nil { + return nil + } + vv, ok := 
v.v.ValueLatLng() + if ok { + return &vv + } + return nil +} + +func (v *Value) ValueLatLngHeight() *LatLngHeight { + if v == nil { + return nil + } + vv, ok := v.v.ValueLatLngHeight() + if ok { + return &vv + } + return nil +} + +func (v *Value) ValueCoordinates() *Coordinates { + if v == nil { + return nil + } + vv, ok := v.v.ValueCoordinates() + if ok { + return &vv + } + return nil +} + +func (v *Value) ValueRect() *Rect { + if v == nil { + return nil + } + vv, ok := v.v.ValueRect() + if ok { + return &vv + } + return nil +} + +func (v *Value) ValuePolygon() *Polygon { + if v == nil { + return nil + } + vv, ok := v.v.ValuePolygon() + if ok { + return &vv + } + return nil +} + +func ValueFromStringOrNumber(s string) *Value { + if s == "true" || s == "false" || s == "TRUE" || s == "FALSE" || s == "True" || s == "False" { + return ValueTypeBool.ValueFrom(s) + } + + if v := ValueTypeNumber.ValueFrom(s); v != nil { + return v + } + + return ValueTypeString.ValueFrom(s) +} diff --git a/server/pkg/dataset/value_optional.go b/server/pkg/dataset/value_optional.go new file mode 100644 index 000000000..ac4225066 --- /dev/null +++ b/server/pkg/dataset/value_optional.go @@ -0,0 +1,89 @@ +package dataset + +import "github.com/reearth/reearth-backend/pkg/value" + +type OptionalValue struct { + ov value.Optional +} + +func NewOptionalValue(t ValueType, v *Value) *OptionalValue { + var vv *value.Value + if v != nil { + vv = &v.v + } + ov := value.NewOptional(value.Type(t), vv) + if ov == nil { + return nil + } + return &OptionalValue{ov: *ov} +} + +func OptionalValueFrom(v *Value) *OptionalValue { + if v == nil { + return nil + } + ov := value.OptionalFrom(&v.v) + if ov == nil { + return nil + } + return &OptionalValue{ + ov: *ov, + } +} + +func (ov *OptionalValue) Type() ValueType { + if ov == nil { + return ValueTypeUnknown + } + return ValueType(ov.ov.Type()) +} + +func (ov *OptionalValue) Value() *Value { + if ov == nil { + return nil + } + vv := ov.ov.Value() + if vv == 
nil { + return nil + } + return &Value{v: *vv} +} + +func (ov *OptionalValue) TypeAndValue() (ValueType, *Value) { + return ov.Type(), ov.Value() +} + +func (ov *OptionalValue) SetValue(v *Value) { + if ov == nil { + return + } + if v == nil { + ov.ov.SetValue(nil) + } else { + ov.ov.SetValue(&v.v) + } +} + +func (ov *OptionalValue) Clone() *OptionalValue { + if ov == nil { + return nil + } + nov := ov.ov.Clone() + if nov == nil { + return nil + } + return &OptionalValue{ + ov: *nov, + } +} + +func (ov *OptionalValue) Cast(t ValueType) *OptionalValue { + if ov == nil { + return nil + } + vv := ov.ov.Cast(value.Type(t), nil) + if vv == nil { + return nil + } + return &OptionalValue{ov: *vv} +} diff --git a/server/pkg/dataset/value_optional_test.go b/server/pkg/dataset/value_optional_test.go new file mode 100644 index 000000000..2ad65aa26 --- /dev/null +++ b/server/pkg/dataset/value_optional_test.go @@ -0,0 +1,361 @@ +package dataset + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/value" + "github.com/stretchr/testify/assert" +) + +func TestNewNilableValue(t *testing.T) { + type args struct { + t ValueType + v *Value + } + + tests := []struct { + name string + args args + want *OptionalValue + }{ + { + name: "default type", + args: args{ + t: ValueTypeString, + v: ValueTypeString.ValueFrom("foo"), + }, + want: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("foo", nil))}, + }, + { + name: "nil value", + args: args{ + t: ValueTypeString, + }, + want: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, + }, + { + name: "invalid value", + args: args{ + t: ValueTypeNumber, + v: ValueTypeString.ValueFrom("foo"), + }, + want: nil, + }, + { + name: "invalid type", + args: args{ + t: ValueTypeUnknown, + v: ValueTypeString.ValueFrom("foo"), + }, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, NewOptionalValue(tt.args.t, tt.args.v)) + 
}) + } +} + +func TestOptionalValueFrom(t *testing.T) { + type args struct { + v *Value + } + + tests := []struct { + name string + args args + want *OptionalValue + }{ + { + name: "default type", + args: args{ + v: ValueTypeString.ValueFrom("foo"), + }, + want: &OptionalValue{ov: *value.NewOptional(value.TypeString, value.TypeString.ValueFrom("foo", nil))}, + }, + { + name: "empty value", + args: args{ + v: &Value{v: value.Value{}}, + }, + want: nil, + }, + { + name: "nil value", + args: args{}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, OptionalValueFrom(tt.args.v)) + }) + } +} + +func TestOptionalValue_Type(t *testing.T) { + tests := []struct { + name string + value *OptionalValue + want ValueType + }{ + { + name: "ok", + value: &OptionalValue{ov: *value.NewOptional(value.TypeBool, nil)}, + want: ValueTypeBool, + }, + { + name: "empty", + value: &OptionalValue{}, + want: ValueTypeUnknown, + }, + { + name: "nil", + value: nil, + want: ValueTypeUnknown, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Type()) + }) + } +} + +func TestOptionalValue_Value(t *testing.T) { + tests := []struct { + name string + value *OptionalValue + want *Value + }{ + { + name: "ok", + value: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("foobar", nil))}, + want: ValueTypeString.ValueFrom("foobar"), + }, + { + name: "empty", + value: &OptionalValue{}, + want: nil, + }, + { + name: "nil", + value: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.value.Value() + assert.Equal(t, tt.want, res) + if res != nil { + assert.NotSame(t, tt.want, res) + } + }) + } +} + +func TestOptionalValue_TypeAndValue(t *testing.T) { + tests := []struct { + name string + value *OptionalValue + wantt ValueType + wantv 
*Value + }{ + { + name: "ok", + value: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("foobar", nil))}, + wantt: ValueTypeString, + wantv: ValueTypeString.ValueFrom("foobar"), + }, + { + name: "empty", + value: &OptionalValue{}, + wantt: ValueTypeUnknown, + wantv: nil, + }, + { + name: "nil", + value: nil, + wantt: ValueTypeUnknown, + wantv: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ty, tv := tt.value.TypeAndValue() + assert.Equal(t, tt.wantt, ty) + assert.Equal(t, tt.wantv, tv) + if tv != nil { + assert.NotSame(t, tt.wantv, tv) + } + }) + } +} + +func TestOptionalValue_SetValue(t *testing.T) { + type args struct { + v *Value + } + + tests := []struct { + name string + value *OptionalValue + args args + invalid bool + }{ + { + name: "set", + value: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("foo", nil))}, + args: args{v: ValueTypeString.ValueFrom("foobar")}, + }, + { + name: "set to nil", + value: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, + args: args{v: ValueTypeString.ValueFrom("foobar")}, + }, + { + name: "invalid value", + value: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, + args: args{v: ValueTypeNumber.ValueFrom(1)}, + invalid: true, + }, + { + name: "nil value", + args: args{v: ValueTypeNumber.ValueFrom(1)}, + }, + { + name: "empty", + value: &OptionalValue{}, + args: args{v: ValueTypeNumber.ValueFrom(1)}, + invalid: true, + }, + { + name: "nil", + args: args{v: ValueTypeNumber.ValueFrom(1)}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + var v *Value + if tt.value != nil { + v = tt.value.Value() + } + + tt.value.SetValue(tt.args.v) + + if tt.value != nil { + if tt.invalid { + assert.Equal(t, v, tt.value.Value()) + } else { + assert.Equal(t, tt.args.v, tt.value.Value()) + } + } + }) + } +} + +func TestOptionalValue_Clone(t *testing.T) { + tests := 
[]struct { + name string + target *OptionalValue + }{ + { + name: "ok", + target: &OptionalValue{ + ov: *value.NewOptional(value.TypeString, value.TypeString.ValueFrom("foo", nil)), + }, + }, + { + name: "empty", + target: &OptionalValue{}, + }, + { + name: "nil", + target: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.Clone() + assert.Equal(t, tt.target, res) + if tt.target != nil { + assert.NotSame(t, tt.target, res) + } + }) + } +} + +func TestOptionalValue_Cast(t *testing.T) { + type args struct { + t ValueType + } + + tests := []struct { + name string + target *OptionalValue + args args + want *OptionalValue + }{ + { + name: "diff type", + target: &OptionalValue{ov: *value.OptionalFrom(value.TypeNumber.ValueFrom(1.1, nil))}, + args: args{t: ValueTypeString}, + want: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("1.1", nil))}, + }, + { + name: "same type", + target: &OptionalValue{ov: *value.OptionalFrom(value.TypeNumber.ValueFrom(1.1, nil))}, + args: args{t: ValueTypeNumber}, + want: &OptionalValue{ov: *value.OptionalFrom(value.TypeNumber.ValueFrom(1.1, nil))}, + }, + { + name: "failed to cast", + target: &OptionalValue{ov: *value.OptionalFrom(value.TypeLatLng.ValueFrom(LatLng{Lat: 1, Lng: 2}, nil))}, + args: args{t: ValueTypeString}, + want: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, + }, + { + name: "empty", + target: &OptionalValue{}, + args: args{t: ValueTypeString}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{t: ValueTypeString}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Cast(tt.args.t)) + }) + } +} diff --git a/server/pkg/dataset/value_test.go b/server/pkg/dataset/value_test.go new file mode 100644 index 000000000..1ec0980f8 --- /dev/null +++ b/server/pkg/dataset/value_test.go @@ -0,0 +1,289 @@ +package 
dataset + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/value" + "github.com/stretchr/testify/assert" +) + +func TestValueType_None(t *testing.T) { + tests := []struct { + name string + tr ValueType + want *OptionalValue + }{ + { + name: "default", + tr: ValueTypeString, + want: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, + }, + { + name: "unknown", + tr: ValueTypeUnknown, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.tr.None()) + }) + } +} + +func TestValue_IsEmpty(t *testing.T) { + tests := []struct { + name string + value *Value + want bool + }{ + { + name: "empty", + want: true, + }, + { + name: "nil", + want: true, + }, + { + name: "non-empty", + value: ValueTypeString.ValueFrom("foo"), + want: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.IsEmpty()) + }) + } +} + +func TestValue_Clone(t *testing.T) { + tests := []struct { + name string + value *Value + want *Value + }{ + { + name: "ok", + value: ValueTypeString.ValueFrom("foo"), + want: &Value{ + v: *value.TypeString.ValueFrom("foo", nil), + }, + }, + { + name: "nil", + value: nil, + want: nil, + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Clone()) + }) + } +} + +func TestValue_Some(t *testing.T) { + tests := []struct { + name string + value *Value + want *OptionalValue + }{ + { + name: "ok", + value: &Value{ + v: *value.TypeString.ValueFrom("foo", nil), + }, + want: &OptionalValue{ + ov: *value.OptionalFrom(value.TypeString.ValueFrom("foo", nil)), + }, + }, + { + name: "nil", + value: nil, + want: nil, + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + 
t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Some()) + }) + } +} + +func TestValue_Value(t *testing.T) { + tests := []struct { + name string + value *Value + want interface{} + }{ + { + name: "ok", + value: ValueTypeString.ValueFrom("foo"), + want: "foo", + }, + { + name: "empty", + value: &Value{}, + }, + { + name: "nil", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.want == nil { + assert.Nil(t, tt.value.Value()) + } else { + assert.Equal(t, tt.want, tt.value.Value()) + } + }) + } +} + +func TestValue_Type(t *testing.T) { + tests := []struct { + name string + value *Value + want ValueType + }{ + { + name: "ok", + value: ValueTypeString.ValueFrom("foo"), + want: ValueTypeString, + }, + { + name: "empty", + value: &Value{}, + want: ValueTypeUnknown, + }, + { + name: "nil", + want: ValueTypeUnknown, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Type()) + }) + } +} + +func TestValue_Interface(t *testing.T) { + tests := []struct { + name string + value *Value + want interface{} + }{ + { + name: "string", + value: ValueTypeString.ValueFrom("foo"), + want: "foo", + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + { + name: "nil", + value: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Interface()) + }) + } +} + +func TestValue_Cast(t *testing.T) { + type args struct { + t ValueType + } + + tests := []struct { + name string + target *Value + args args + want *Value + }{ + { + name: "diff type", + target: ValueTypeNumber.ValueFrom(1.1), + args: args{t: ValueTypeString}, + want: ValueTypeString.ValueFrom("1.1"), + }, + { + name: "same type", + target: ValueTypeNumber.ValueFrom(1.1), + args: args{t: ValueTypeNumber}, + want: 
// File is an abstraction of a single file: a content stream, the file's
// path, its size in bytes, and an optional content type.
type File struct {
	Content io.ReadCloser
	Path    string
	Size    int64
	// ContentType is an empty string when the content type was not
	// explicitly specified.
	ContentType string
}

// Iterator iterates over a sequence of files.
type Iterator interface {
	// Next returns the next File. When no file remains it returns a nil
	// file and a nil error.
	Next() (*File, error)
}

// SimpleIterator serves a fixed in-memory list of files. For debugging.
type SimpleIterator struct {
	pos     int
	entries []File
}

// NewSimpleIterator copies files and returns an iterator over the copy, so
// later mutation of the caller's slice does not affect iteration.
func NewSimpleIterator(files []File) *SimpleIterator {
	copied := append([]File(nil), files...)
	return &SimpleIterator{entries: copied}
}

// Next returns the next file, or (nil, nil) once every file has been served.
func (s *SimpleIterator) Next() (*File, error) {
	if s.pos >= len(s.entries) {
		return nil, nil
	}
	f := s.entries[s.pos]
	s.pos++
	return &f, nil
}

// PrefixIterator wraps another Iterator and yields only the files located
// under the given path prefix, with the prefix stripped from their paths.
// An empty prefix passes every file through unchanged.
type PrefixIterator struct {
	inner  Iterator
	prefix string
}

// NewPrefixIterator wraps a with the given path prefix filter.
func NewPrefixIterator(a Iterator, prefix string) *PrefixIterator {
	return &PrefixIterator{inner: a, prefix: prefix}
}

// Next returns the next file under the prefix, or (nil, nil) when exhausted.
func (s *PrefixIterator) Next() (*File, error) {
	for {
		n, err := s.inner.Next()
		if err != nil || n == nil {
			return nil, err
		}
		if s.prefix == "" {
			return n, nil
		}
		p := s.prefix + "/"
		if strings.HasPrefix(n.Path, p) {
			stripped := *n
			stripped.Path = strings.TrimPrefix(stripped.Path, p)
			return &stripped, nil
		}
	}
}

// FilteredIterator wraps another Iterator and drops every file whose path the
// skipper function reports true for.
type FilteredIterator struct {
	inner   Iterator
	skipper func(p string) bool
}

// NewFilteredIterator wraps a with the given skip predicate.
func NewFilteredIterator(a Iterator, skipper func(p string) bool) *FilteredIterator {
	return &FilteredIterator{inner: a, skipper: skipper}
}

// Next returns the next non-skipped file, or (nil, nil) when exhausted.
func (s *FilteredIterator) Next() (*File, error) {
	for {
		n, err := s.inner.Next()
		if err != nil || n == nil {
			return nil, err
		}
		if !s.skipper(n.Path) {
			return n, nil
		}
	}
}
<= a.c { + return nil, nil + } + + next := a.files[a.c] + a.c++ + fi, err := a.fs.Open(next) + if err != nil { + return nil, err + } + + stat, err := fi.Stat() + if err != nil { + return nil, err + } + + return &File{ + Content: fi, + Path: next, + Size: stat.Size(), + }, nil +} diff --git a/server/pkg/file/file_test.go b/server/pkg/file/file_test.go new file mode 100644 index 000000000..5970c13df --- /dev/null +++ b/server/pkg/file/file_test.go @@ -0,0 +1,197 @@ +package file + +import ( + "io" + "os" + "testing" + + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" +) + +func TestReaders(t *testing.T) { + zf, err := os.Open("testdata/test.zip") + assert.NoError(t, err) + defer func() { + _ = zf.Close() + }() + zr, err := ZipReaderFrom(zf, 1024) + assert.NoError(t, err) + + tf, err := os.Open("testdata/test.tar.gz") + assert.NoError(t, err) + defer func() { + _ = tf.Close() + }() + tr, err := TarReaderFromTarGz(tf) + assert.NoError(t, err) + + files := map[string]string{ + "reearth.json": "{\n \"reearth\": \"Re:Earth\"\n}\n", + "index.js": "console.log(\"hello world\");\n", + "test/foo.bar": "test\n", + } + + tests := []struct { + Name string + Archive Iterator + Files []string + }{ + { + Name: "zip", + Archive: zr, + Files: []string{"test/foo.bar", "index.js", "reearth.json"}, + }, + { + Name: "tar", + Archive: tr, + Files: []string{"test/foo.bar", "index.js", "reearth.json"}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + // t.Parallel() cannot be used + assert := assert.New(t) + + for i, f := range tc.Files { + n, err := tc.Archive.Next() + assert.NoError(err) + assert.Equal(f, n.Path, "file %d in %s", i, tc.Name) + assert.Equal(int64(len(files[f])), n.Size, "file %d in %s", i, tc.Name) + assert.Equal("", n.ContentType, "file %d in %s", i, tc.Name) + + fc, err := io.ReadAll(n.Content) + assert.NoError(err) + assert.Equal(files[f], string(fc)) + + assert.NoError(n.Content.Close()) + } + + n, err := 
tc.Archive.Next() + assert.Nil(err) + assert.Nil(n) + + n, err = tc.Archive.Next() + assert.Nil(err) + assert.Nil(n) + }) + } +} + +func TestSimpleIterator(t *testing.T) { + a := NewSimpleIterator([]File{{Path: "a"}, {Path: "b"}, {Path: "c"}}) + + n, err := a.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "a"}, n) + + n, err = a.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "b"}, n) + + n, err = a.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "c"}, n) + + n, err = a.Next() + assert.NoError(t, err) + assert.Nil(t, n) + + n, err = a.Next() + assert.NoError(t, err) + assert.Nil(t, n) +} + +func TestPrefixIterator(t *testing.T) { + ba := NewSimpleIterator([]File{ + {Path: "a"}, {Path: "b"}, {Path: "c/d"}, {Path: "e"}, {Path: "f/g/h"}, {Path: "c/i/j"}, + }) + a := NewPrefixIterator(ba, "c") + + n, err := a.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "d"}, n) + + n, err = a.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "i/j"}, n) + + n, err = a.Next() + assert.NoError(t, err) + assert.Nil(t, n) + + ba2 := NewSimpleIterator([]File{ + {Path: "a"}, {Path: "b"}, + }) + a2 := NewPrefixIterator(ba2, "") + + n2, err := a2.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "a"}, n2) + + n2, err = a2.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "b"}, n2) + + n2, err = a2.Next() + assert.NoError(t, err) + assert.Nil(t, n2) +} + +func TestFilteredIterator(t *testing.T) { + var paths []string + ba := NewSimpleIterator([]File{ + {Path: "0"}, {Path: "1"}, {Path: "2"}, + }) + a := NewFilteredIterator(ba, func(p string) bool { + paths = append(paths, p) + return p == "1" + }) + + n, err := a.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "0"}, n) + + n, err = a.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "2"}, n) + + n, err = a.Next() + assert.NoError(t, err) + assert.Nil(t, n) + assert.Equal(t, []string{"0", "1", "2"}, paths) +} + +func 
TestFsIterator(t *testing.T) { + fs := afero.NewMemMapFs() + _ = fs.MkdirAll("a/b", 0755) + f, _ := fs.Create("b") + _, _ = f.WriteString("hello") + _ = f.Close() + _, _ = fs.Create("a/b/c") + + a, err := NewFsIterator(fs) + assert.NoError(t, err) + + n, err := a.Next() + assert.NoError(t, err) + assert.Equal(t, "a/b/c", n.Path) + nd, err := io.ReadAll(n.Content) + assert.NoError(t, err) + assert.Equal(t, []byte{}, nd) + assert.NoError(t, n.Content.Close()) + + n, err = a.Next() + assert.NoError(t, err) + assert.Equal(t, "b", n.Path) + nd, err = io.ReadAll(n.Content) + assert.NoError(t, err) + assert.Equal(t, "hello", string(nd)) + assert.NoError(t, n.Content.Close()) + + n, err = a.Next() + assert.NoError(t, err) + assert.Nil(t, n) +} diff --git a/server/pkg/file/targz.go b/server/pkg/file/targz.go new file mode 100644 index 000000000..e4be21db6 --- /dev/null +++ b/server/pkg/file/targz.go @@ -0,0 +1,45 @@ +package file + +import ( + "archive/tar" + "compress/gzip" + "errors" + "io" +) + +type TarReader struct { + tr *tar.Reader +} + +func NewTarReader(tr *tar.Reader) *TarReader { + return &TarReader{tr: tr} +} + +func TarReaderFromTarGz(r io.Reader) (*TarReader, error) { + gzipReader, err := gzip.NewReader(r) + if err != nil { + return nil, err + } + return &TarReader{tr: tar.NewReader(gzipReader)}, nil +} + +func (r *TarReader) Next() (*File, error) { + if r == nil || r.tr == nil { + return nil, nil + } + + h, err := r.tr.Next() + if errors.Is(err, io.EOF) { + return nil, nil + } + if err != nil { + return nil, err + } + + fi := h.FileInfo() + if fi.IsDir() { + return r.Next() + } + + return &File{Content: io.NopCloser(r.tr), Path: h.Name, Size: fi.Size()}, nil +} diff --git a/server/pkg/file/testdata/test.tar.gz b/server/pkg/file/testdata/test.tar.gz new file mode 100644 index 000000000..c2f71e472 Binary files /dev/null and b/server/pkg/file/testdata/test.tar.gz differ diff --git a/server/pkg/file/testdata/test.zip b/server/pkg/file/testdata/test.zip new file 
mode 100644 index 000000000..cad689137 Binary files /dev/null and b/server/pkg/file/testdata/test.zip differ diff --git a/server/pkg/file/zip.go b/server/pkg/file/zip.go new file mode 100644 index 000000000..8a6a71b6c --- /dev/null +++ b/server/pkg/file/zip.go @@ -0,0 +1,87 @@ +package file + +import ( + "archive/zip" + "bytes" + "io" + "strings" +) + +type ZipReader struct { + zr *zip.Reader + i int +} + +func NewZipReader(zr *zip.Reader) *ZipReader { + return &ZipReader{zr: zr} +} + +func ZipReaderFrom(r io.Reader, n int64) (*ZipReader, error) { + b, err := io.ReadAll(io.LimitReader(r, n)) + if err != nil { + return nil, err + } + + zr, err := zip.NewReader(bytes.NewReader(b), int64(len(b))) + if err != nil { + return nil, err + } + + return NewZipReader(zr), nil +} + +func (r *ZipReader) Next() (*File, error) { + if r == nil || r.zr == nil { + return nil, nil + } + + if len(r.zr.File) <= r.i { + return nil, nil + } + + f := r.zr.File[r.i] + r.i++ + + fi := f.FileInfo() + if fi.IsDir() { + return r.Next() + } + + c, err := f.Open() + if err != nil { + return nil, err + } + + return &File{Content: c, Path: f.Name, Size: fi.Size()}, nil +} + +func MockZipReader(files []string) *zip.Reader { + b := new(bytes.Buffer) + w := zip.NewWriter(b) + for _, f := range files { + _, _ = w.Create(f) + } + _ = w.Close() + b2 := b.Bytes() + zr, _ := zip.NewReader(bytes.NewReader(b2), int64(len(b2))) + return zr +} + +func ZipBasePath(zr *zip.Reader) (b string) { + for _, f := range zr.File { + fp := strings.Split(f.Name, "/") + if len(fp) <= 1 { + // a file is existing in the root + return "" + } + // extract root directory name + if len(fp) == 2 && fp[1] == "" { + if b != "" { + // there are multiple directories on the root + return "" + } + b = fp[0] + } + } + return +} diff --git a/server/pkg/file/zip_test.go b/server/pkg/file/zip_test.go new file mode 100644 index 000000000..2c4e7fb77 --- /dev/null +++ b/server/pkg/file/zip_test.go @@ -0,0 +1,32 @@ +package file + +import ( + 
"io" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestMockZipReader(t *testing.T) { + z := MockZipReader([]string{"a", "b", "c/", "c/d"}) + assert.Equal(t, "a", z.File[0].Name) + assert.Equal(t, "b", z.File[1].Name) + assert.Equal(t, "c/", z.File[2].Name) + assert.Equal(t, "c/d", z.File[3].Name) + + for _, f := range []string{"a", "b", "c/d"} { + zf, err := z.Open(f) + assert.NoError(t, err) + b, err := io.ReadAll(zf) + assert.NoError(t, err) + assert.Equal(t, []byte{}, b) + assert.NoError(t, zf.Close()) + } +} + +func TestZipBasePath(t *testing.T) { + assert.Equal(t, "aaa", ZipBasePath(MockZipReader([]string{"aaa/", "aaa/a"}))) + assert.Equal(t, "", ZipBasePath(MockZipReader([]string{"aaa/", "aaa/a", "b"}))) + assert.Equal(t, "", ZipBasePath(MockZipReader([]string{"aaa"}))) + assert.Equal(t, "", ZipBasePath(MockZipReader([]string{"aaa/", "aaa/a", "b/", "b/c"}))) +} diff --git a/server/pkg/i18n/string.go b/server/pkg/i18n/string.go new file mode 100644 index 000000000..410b84fed --- /dev/null +++ b/server/pkg/i18n/string.go @@ -0,0 +1,72 @@ +package i18n + +const DefaultLang = "en" + +type String map[string]string // key should use BCP 47 representation + +func StringFrom(s string) String { + if s == "" { + return String{} + } + return String{DefaultLang: s} +} + +func (s String) WithDefault(d string) String { + if s == nil && d == "" { + return nil + } + + res := s.Clone() + if res == nil { + res = String{} + } + if d != "" { + res[DefaultLang] = d + } + return res +} + +func (s String) WithDefaultRef(d *string) String { + if d == nil { + return s.Clone() + } + return s.WithDefault(*d) +} + +func (s String) Translated(lang ...string) string { + if s == nil { + return "" + } + for _, l := range lang { + if s, ok := s[l]; ok { + return s + } + } + return s.String() +} + +func (s String) Clone() String { + if len(s) == 0 { + return nil + } + s2 := make(String, len(s)) + for k, v := range s { + s2[k] = v + } + return s2 +} + +func (s String) 
String() string { + if s == nil { + return "" + } + return s[DefaultLang] +} + +func (s String) StringRef() *string { + if s == nil { + return nil + } + st := s[DefaultLang] + return &st +} diff --git a/server/pkg/i18n/string_test.go b/server/pkg/i18n/string_test.go new file mode 100644 index 000000000..8aa6affc1 --- /dev/null +++ b/server/pkg/i18n/string_test.go @@ -0,0 +1,248 @@ +package i18n + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestString_String(t *testing.T) { + tests := []struct { + Name, Expected string + Target String + }{ + { + Name: "en string", + Expected: "foo", + Target: String{"en": "foo"}, + }, + { + Name: "nil string", + Expected: "", + Target: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.Target.String()) + }) + } +} + +func TestString_WithDefault(t *testing.T) { + tests := []struct { + Name string + Target String + Input string + Expected String + }{ + { + Name: "ok", + Target: String{"en": "foo", "ja": "bar"}, + Input: "x", + Expected: String{"en": "x", "ja": "bar"}, + }, + { + Name: "empty default", + Target: String{"en": "foo"}, + Input: "", + Expected: String{"en": "foo"}, + }, + { + Name: "empty", + Target: String{}, + Input: "x", + Expected: String{"en": "x"}, + }, + { + Name: "empty string and empty default", + Target: String{}, + Input: "", + Expected: String{}, + }, + { + Name: "nil string", + Target: nil, + Input: "x", + Expected: String{"en": "x"}, + }, + { + Name: "nil string and empty default", + Target: nil, + Input: "", + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.Target.WithDefault(tc.Input)) + }) + } +} + +func TestString_WithDefaultRef(t *testing.T) { + tests := []struct { + Name string + Target String + Input *string + Expected String + }{ + { + Name: "ok", + Target: String{"en": "foo", 
"ja": "bar"}, + Input: sr("x"), + Expected: String{"en": "x", "ja": "bar"}, + }, + { + Name: "nil default", + Target: String{"en": "foo", "ja": "bar"}, + Input: nil, + Expected: String{"en": "foo", "ja": "bar"}, + }, + { + Name: "empty default", + Target: String{"en": "foo"}, + Input: sr(""), + Expected: String{"en": "foo"}, + }, + { + Name: "empty", + Target: String{}, + Input: sr("x"), + Expected: String{"en": "x"}, + }, + { + Name: "empty string and empty default", + Target: String{}, + Input: sr(""), + Expected: String{}, + }, + { + Name: "nil string", + Target: nil, + Input: sr("x"), + Expected: String{"en": "x"}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.Target.WithDefaultRef(tc.Input)) + }) + } +} + +func TestStringTranslated(t *testing.T) { + tests := []struct { + Name, Lang, ExpectedStr string + I18nString String + }{ + { + Name: "ja string", + Lang: "ja", + ExpectedStr: "fooJA", + I18nString: String{"ja": "fooJA"}, + }, + { + Name: "default string", + ExpectedStr: "foo", + Lang: "", + I18nString: String{"en": "foo"}, + }, + { + Name: "nil string", + ExpectedStr: "", + Lang: "", + I18nString: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.ExpectedStr, tc.I18nString.Translated(tc.Lang)) + }) + } +} + +func TestStringFrom(t *testing.T) { + assert.Equal(t, String{"en": "foo"}, StringFrom("foo")) + assert.Equal(t, String{}, StringFrom("")) +} + +func TestString_Clone(t *testing.T) { + tests := []struct { + Name string + Target, Expected String + }{ + { + Name: "String with content", + Target: String{"ja": "foo"}, + Expected: String{"ja": "foo"}, + }, + { + Name: "empty String", + Target: String{}, + Expected: nil, + }, + { + Name: "nil", + Target: nil, + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := 
tc.Target.Clone() + assert.Equal(t, tc.Expected, res) + assert.NotSame(t, tc.Target, res) + }) + } +} + +func TestString_StringRef(t *testing.T) { + stringRef := func(s string) *string { + return &s + } + + tests := []struct { + Name string + I18nString String + Expected *string + }{ + { + Name: "en string", + I18nString: String{"en": "foo"}, + Expected: stringRef("foo"), + }, + { + Name: "nil string", + I18nString: nil, + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.I18nString.StringRef()) + }) + } +} + +func sr(s string) *string { + return &s +} diff --git a/server/pkg/id/common.go b/server/pkg/id/common.go new file mode 100644 index 000000000..28b5aa31d --- /dev/null +++ b/server/pkg/id/common.go @@ -0,0 +1,5 @@ +package id + +import "github.com/reearth/reearth-backend/pkg/id/idx" + +var ErrInvalidID = idx.ErrInvalidID diff --git a/server/pkg/id/id.go b/server/pkg/id/id.go new file mode 100644 index 000000000..a2f5e3e14 --- /dev/null +++ b/server/pkg/id/id.go @@ -0,0 +1,204 @@ +package id + +import "github.com/reearth/reearth-backend/pkg/id/idx" + +type Asset struct{} +type AuthRequest struct{} +type Dataset struct{} +type DatasetField struct{} +type DatasetSchema struct{} +type Cluster struct{} +type InfoboxField struct{} +type Layer struct{} +type PluginExtension struct{} +type Project struct{} +type Property struct{} +type PropertyItem struct{} +type PropertyField struct{} +type PropertySchemaGroup struct{} +type Scene struct{} +type Tag struct{} +type Team struct{} +type User struct{} +type Widget struct{} + +func (Asset) Type() string { return "asset" } +func (AuthRequest) Type() string { return "authRequest" } +func (Dataset) Type() string { return "dataset" } +func (DatasetField) Type() string { return "datasetField" } +func (DatasetSchema) Type() string { return "datasetSchema" } +func (Cluster) Type() string { return "cluster" } +func 
(InfoboxField) Type() string { return "infoboxField" } +func (Layer) Type() string { return "layer" } +func (PluginExtension) Type() string { return "pluginExtension" } +func (Project) Type() string { return "project" } +func (Property) Type() string { return "property" } +func (PropertyItem) Type() string { return "propertyItem" } +func (PropertyField) Type() string { return "propertyField" } +func (PropertySchemaGroup) Type() string { return "propertySchemaGroup" } +func (Scene) Type() string { return "scene" } +func (Tag) Type() string { return "tag" } +func (Team) Type() string { return "team" } +func (User) Type() string { return "user" } +func (Widget) Type() string { return "widget" } + +type AssetID = idx.ID[Asset] +type AuthRequestID = idx.ID[AuthRequest] +type DatasetID = idx.ID[Dataset] +type DatasetFieldID = idx.ID[DatasetField] +type DatasetSchemaID = idx.ID[DatasetSchema] +type ClusterID = idx.ID[Cluster] +type InfoboxFieldID = idx.ID[InfoboxField] +type LayerID = idx.ID[Layer] +type ProjectID = idx.ID[Project] +type PropertyID = idx.ID[Property] +type PropertyItemID = idx.ID[PropertyItem] +type SceneID = idx.ID[Scene] +type TagID = idx.ID[Tag] +type TeamID = idx.ID[Team] +type UserID = idx.ID[User] +type WidgetID = idx.ID[Widget] + +type PluginExtensionID = idx.StringID[PluginExtension] +type PropertySchemaGroupID = idx.StringID[PropertySchemaGroup] +type PropertyFieldID = idx.StringID[PropertyField] + +var NewAssetID = idx.New[Asset] +var NewAuthRequestID = idx.New[AuthRequest] +var NewDatasetID = idx.New[Dataset] +var NewDatasetFieldID = idx.New[DatasetField] +var NewDatasetSchemaID = idx.New[DatasetSchema] +var NewClusterID = idx.New[Cluster] +var NewInfoboxFieldID = idx.New[InfoboxField] +var NewLayerID = idx.New[Layer] +var NewProjectID = idx.New[Project] +var NewPropertyID = idx.New[Property] +var NewPropertyItemID = idx.New[PropertyItem] +var NewSceneID = idx.New[Scene] +var NewTagID = idx.New[Tag] +var NewTeamID = idx.New[Team] +var NewUserID 
= idx.New[User] +var NewWidgetID = idx.New[Widget] + +var MustAssetID = idx.Must[Asset] +var MustAuthRequestID = idx.Must[AuthRequest] +var MustDatasetID = idx.Must[Dataset] +var MustDatasetFieldID = idx.Must[DatasetField] +var MustDatasetSchemaID = idx.Must[DatasetSchema] +var MustClusterID = idx.Must[Cluster] +var MustInfoboxFieldID = idx.Must[InfoboxField] +var MustLayerID = idx.Must[Layer] +var MustProjectID = idx.Must[Project] +var MustPropertyID = idx.Must[Property] +var MustPropertyItemID = idx.Must[PropertyItem] +var MustSceneID = idx.Must[Scene] +var MustTagID = idx.Must[Tag] +var MustTeamID = idx.Must[Team] +var MustUserID = idx.Must[User] +var MustWidgetID = idx.Must[Widget] + +var AssetIDFrom = idx.From[Asset] +var AuthRequestIDFrom = idx.From[AuthRequest] +var DatasetIDFrom = idx.From[Dataset] +var DatasetFieldIDFrom = idx.From[DatasetField] +var DatasetSchemaIDFrom = idx.From[DatasetSchema] +var ClusterIDFrom = idx.From[Cluster] +var InfoboxFieldIDFrom = idx.From[InfoboxField] +var LayerIDFrom = idx.From[Layer] +var ProjectIDFrom = idx.From[Project] +var PropertyIDFrom = idx.From[Property] +var PropertyItemIDFrom = idx.From[PropertyItem] +var SceneIDFrom = idx.From[Scene] +var TagIDFrom = idx.From[Tag] +var TeamIDFrom = idx.From[Team] +var UserIDFrom = idx.From[User] +var WidgetIDFrom = idx.From[Widget] + +var AssetIDFromRef = idx.FromRef[Asset] +var AuthRequestIDFromRef = idx.FromRef[AuthRequest] +var DatasetIDFromRef = idx.FromRef[Dataset] +var DatasetFieldIDFromRef = idx.FromRef[DatasetField] +var DatasetSchemaIDFromRef = idx.FromRef[DatasetSchema] +var ClusterIDFromRef = idx.FromRef[Cluster] +var InfoboxFieldIDFromRef = idx.FromRef[InfoboxField] +var LayerIDFromRef = idx.FromRef[Layer] +var ProjectIDFromRef = idx.FromRef[Project] +var PropertyIDFromRef = idx.FromRef[Property] +var PropertyItemIDFromRef = idx.FromRef[PropertyItem] +var SceneIDFromRef = idx.FromRef[Scene] +var TagIDFromRef = idx.FromRef[Tag] +var TeamIDFromRef = idx.FromRef[Team] 
+var UserIDFromRef = idx.FromRef[User] +var WidgetIDFromRef = idx.FromRef[Widget] + +var PluginExtensionIDFromRef = idx.StringIDFromRef[PluginExtension] +var PropertyFieldIDFromRef = idx.StringIDFromRef[PropertyField] +var PropertySchemaGroupIDFromRef = idx.StringIDFromRef[PropertySchemaGroup] + +type AssetIDList = idx.List[Asset] +type AuthRequestIDList = idx.List[AuthRequest] +type DatasetIDList = idx.List[Dataset] +type DatasetFieldIDList = idx.List[DatasetField] +type DatasetSchemaIDList = idx.List[DatasetSchema] +type ClusterIDList = idx.List[Cluster] +type InfoboxFieldIDList = idx.List[InfoboxField] +type LayerIDList = idx.List[Layer] +type ProjectIDList = idx.List[Project] +type PropertyIDList = idx.List[Property] +type PropertyItemIDList = idx.List[PropertyItem] +type SceneIDList = idx.List[Scene] +type TagIDList = idx.List[Tag] +type TeamIDList = idx.List[Team] +type UserIDList = idx.List[User] +type WidgetIDList = idx.List[Widget] + +var AssetIDListFrom = idx.ListFrom[Asset] +var AuthRequestIDListFrom = idx.ListFrom[AuthRequest] +var DatasetIDListFrom = idx.ListFrom[Dataset] +var DatasetFieldIDListFrom = idx.ListFrom[DatasetField] +var DatasetSchemaIDListFrom = idx.ListFrom[DatasetSchema] +var ClusterIDListFrom = idx.ListFrom[Cluster] +var InfoboxFieldIDListFrom = idx.ListFrom[InfoboxField] +var LayerIDListFrom = idx.ListFrom[Layer] +var ProjectIDListFrom = idx.ListFrom[Project] +var PropertyIDListFrom = idx.ListFrom[Property] +var PropertyItemIDListFrom = idx.ListFrom[PropertyItem] +var SceneIDListFrom = idx.ListFrom[Scene] +var TagIDListFrom = idx.ListFrom[Tag] +var TeamIDListFrom = idx.ListFrom[Team] +var UserIDListFrom = idx.ListFrom[User] +var WidgetIDListFrom = idx.ListFrom[Widget] + +type AssetIDSet = idx.Set[Asset] +type AuthRequestIDSet = idx.Set[AuthRequest] +type DatasetIDSet = idx.Set[Dataset] +type DatasetFieldIDSet = idx.Set[DatasetField] +type DatasetSchemaIDSet = idx.Set[DatasetSchema] +type ClusterIDSet = idx.Set[Cluster] +type 
InfoboxFieldIDSet = idx.Set[InfoboxField] +type LayerIDSet = idx.Set[Layer] +type ProjectIDSet = idx.Set[Project] +type PropertyIDSet = idx.Set[Property] +type PropertyItemIDSet = idx.Set[PropertyItem] +type SceneIDSet = idx.Set[Scene] +type TagIDSet = idx.Set[Tag] +type TeamIDSet = idx.Set[Team] +type UserIDSet = idx.Set[User] +type WidgetIDSet = idx.Set[Widget] + +var NewAssetIDSet = idx.NewSet[Asset] +var NewAuthRequestIDSet = idx.NewSet[AuthRequest] +var NewDatasetIDSet = idx.NewSet[Dataset] +var NewDatasetFieldIDSet = idx.NewSet[DatasetField] +var NewDatasetSchemaIDSet = idx.NewSet[DatasetSchema] +var NewClusterIDSet = idx.NewSet[Cluster] +var NewInfoboxFieldIDSet = idx.NewSet[InfoboxField] +var NewLayerIDSet = idx.NewSet[Layer] +var NewProjectIDSet = idx.NewSet[Project] +var NewPropertyIDSet = idx.NewSet[Property] +var NewPropertyItemIDSet = idx.NewSet[PropertyItem] +var NewSceneIDSet = idx.NewSet[Scene] +var NewTagIDSet = idx.NewSet[Tag] +var NewTeamIDSet = idx.NewSet[Team] +var NewUserIDSet = idx.NewSet[User] +var NewWidgetIDSet = idx.NewSet[Widget] diff --git a/server/pkg/id/idx/id.go b/server/pkg/id/idx/id.go new file mode 100644 index 000000000..9547dca21 --- /dev/null +++ b/server/pkg/id/idx/id.go @@ -0,0 +1,140 @@ +package idx + +import ( + "errors" + "strings" + "time" + + "github.com/oklog/ulid" + "github.com/reearth/reearth-backend/pkg/util" + "github.com/samber/lo" +) + +var ErrInvalidID = errors.New("invalid ID") + +type Type interface { + Type() string +} + +type ID[T Type] struct { + id ulid.ULID +} + +func New[T Type]() ID[T] { + return ID[T]{id: generateID()} +} + +func NewAll[T Type](n int) (l List[T]) { + if n <= 0 { + return + } + if n == 1 { + return List[T]{New[T]()} + } + return lo.Map(generateAllID(n), func(id ulid.ULID, _ int) ID[T] { + return ID[T]{id: id} + }) +} + +func From[T Type](id string) (ID[T], error) { + parsedID, e := parseID(id) + if e != nil { + return ID[T]{}, ErrInvalidID + } + return ID[T]{id: parsedID}, nil +} + +func 
Must[T Type](id string) ID[T] { + return util.Must(From[T](id)) +} + +func FromRef[T Type](id *string) *ID[T] { + if id == nil { + return nil + } + nid, err := From[T](*id) + if err != nil { + return nil + } + return &nid +} + +func (id ID[T]) Ref() *ID[T] { + return &id +} + +func (id ID[T]) Clone() ID[T] { + return ID[T]{id: id.id} +} + +func (id *ID[T]) CloneRef() *ID[T] { + if id == nil { + return nil + } + i := id.Clone() + return &i +} + +func (id *ID[T]) CopyRef() *ID[T] { + return id.CloneRef() +} + +func (ID[T]) Type() string { + var t T + return t.Type() +} + +func (id ID[T]) Timestamp() time.Time { + return ulid.Time(id.id.Time()) +} + +// String implements fmt.Stringer interface. +func (id ID[T]) String() string { + if id.IsEmpty() { + return "" + } + return strings.ToLower(ulid.ULID(id.id).String()) +} + +func (id *ID[T]) StringRef() *string { + if id == nil { + return nil + } + s := id.String() + return &s +} + +// GoString implements fmt.GoStringer interface. +func (id ID[T]) GoString() string { + return id.Type() + "ID(" + id.String() + ")" +} + +func (id ID[T]) Compare(id2 ID[T]) int { + return id.id.Compare(id2.id) +} + +func (i ID[T]) Equal(i2 ID[T]) bool { + return i.id.Compare(i2.id) == 0 +} + +func (id ID[T]) IsEmpty() bool { + return id.id.Compare(ulid.ULID{}) == 0 +} + +func (id *ID[T]) IsNil() bool { + return id == nil || (*id).IsEmpty() +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *ID[T]) MarshalText() ([]byte, error) { + if d.IsNil() { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (id *ID[T]) UnmarshalText(b []byte) (err error) { + *id, err = From[T](string(b)) + return +} diff --git a/server/pkg/id/idx/id_test.go b/server/pkg/id/idx/id_test.go new file mode 100644 index 000000000..10b116a58 --- /dev/null +++ b/server/pkg/id/idx/id_test.go @@ -0,0 +1,26 @@ +package idx + +import ( + "testing" + + 
"github.com/stretchr/testify/assert" +) + +type TID = ID[T] + +// T is a dummy ID type for unit tests +type T struct{} + +func (T) Type() string { return "_" } + +var idstr = mustParseID("01fzxycwmq7n84q8kessktvb8z") + +func TestID_String(t *testing.T) { + assert.Equal(t, "01fzxycwmq7n84q8kessktvb8z", TID{id: idstr}.String()) + assert.Equal(t, "", ID[T]{}.String()) +} + +func TestID_GoString(t *testing.T) { + assert.Equal(t, "_ID(01fzxycwmq7n84q8kessktvb8z)", TID{id: idstr}.GoString()) + assert.Equal(t, "_ID()", TID{}.GoString()) +} diff --git a/server/pkg/id/idx/list.go b/server/pkg/id/idx/list.go new file mode 100644 index 000000000..8c8dcffdc --- /dev/null +++ b/server/pkg/id/idx/list.go @@ -0,0 +1,115 @@ +package idx + +import ( + "github.com/reearth/reearth-backend/pkg/util" + "golang.org/x/exp/slices" +) + +type List[T Type] []ID[T] + +type RefList[T Type] []*ID[T] + +func ListFrom[T Type](ids []string) (List[T], error) { + return util.TryMap(ids, From[T]) +} + +func MustList[T Type](ids []string) List[T] { + return util.Must(ListFrom[T](ids)) +} + +func (l List[T]) list() util.List[ID[T]] { + return util.List[ID[T]](l) +} + +func (l List[T]) Has(ids ...ID[T]) bool { + return l.list().Has(ids...) 
+} + +func (l List[T]) At(i int) *ID[T] { + return l.list().At(i) +} + +func (l List[T]) Index(id ID[T]) int { + return l.list().Index(id) +} + +func (l List[T]) Len() int { + return l.list().Len() +} + +func (l List[T]) Ref() *List[T] { + return (*List[T])(l.list().Ref()) +} + +func (l List[T]) Refs() RefList[T] { + return l.list().Refs() +} + +func (l List[T]) Delete(ids ...ID[T]) List[T] { + return List[T](l.list().Delete(ids...)) +} + +func (l List[T]) DeleteAt(i int) List[T] { + return List[T](l.list().DeleteAt(i)) +} + +func (l List[T]) Add(ids ...ID[T]) List[T] { + return List[T](l.list().Add(ids...)) +} + +func (l List[T]) AddUniq(ids ...ID[T]) List[T] { + return List[T](l.list().AddUniq(ids...)) +} + +func (l List[T]) Insert(i int, ids ...ID[T]) List[T] { + return List[T](l.list().Insert(i, ids...)) +} + +func (l List[T]) Move(e ID[T], to int) List[T] { + return List[T](l.list().Move(e, to)) +} + +func (l List[T]) MoveAt(from, to int) List[T] { + return List[T](l.list().MoveAt(from, to)) +} + +func (l List[T]) Reverse() List[T] { + return List[T](l.list().Reverse()) +} + +func (l List[T]) Concat(m List[T]) List[T] { + return List[T](l.list().Concat(m)) +} + +func (l List[T]) Intersect(m List[T]) List[T] { + return List[T](l.list().Intersect(m)) +} + +func (l List[T]) Strings() []string { + return util.Map(l, func(id ID[T]) string { + return id.String() + }) +} + +func (l List[T]) Clone() List[T] { + return util.Map(l, func(id ID[T]) ID[T] { + return id.Clone() + }) +} + +func (l List[T]) Sort() List[T] { + m := l.list().Copy() + slices.SortStableFunc(m, func(a, b ID[T]) bool { + return a.Compare(b) <= 0 + }) + return List[T](m) +} + +func (l RefList[T]) Deref() List[T] { + return util.FilterMap(l, func(id *ID[T]) *ID[T] { + if id != nil && !(*id).IsNil() { + return id + } + return nil + }) +} diff --git a/server/pkg/id/idx/list_test.go b/server/pkg/id/idx/list_test.go new file mode 100644 index 000000000..9552e094d --- /dev/null +++ 
b/server/pkg/id/idx/list_test.go @@ -0,0 +1,249 @@ +package idx + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestListFrom(t *testing.T) { + id := Must[T]("01g0nzan4qnb2f2s9ehrgv62a3") + + ids, err := ListFrom[T]([]string{"01g0nzan4qnb2f2s9ehrgv62a3"}) + assert.NoError(t, err) + assert.Equal(t, List[T]{id}, ids) + + ids, err = ListFrom[T]([]string{"01g0nzan4qnb2f2s9ehrgv62a3", "a"}) + assert.Equal(t, ErrInvalidID, err) + assert.Nil(t, ids) +} + +func TestMustList(t *testing.T) { + id := Must[T]("01g0nzan4qnb2f2s9ehrgv62a3") + + ids := MustList[T]([]string{"01g0nzan4qnb2f2s9ehrgv62a3"}) + assert.Equal(t, List[T]{id}, ids) + + assert.PanicsWithValue(t, ErrInvalidID, func() { + _ = MustList[T]([]string{"01g0nzan4qnb2f2s9ehrgv62a3", "a"}) + }) +} + +func TestList_Has(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b} + + assert.True(t, l.Has(a)) + assert.True(t, l.Has(a, c)) + assert.False(t, l.Has(c)) + assert.False(t, List[T](nil).Has(a)) +} + +func TestList_At(t *testing.T) { + a := New[T]() + b := New[T]() + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).At(0)) + assert.Nil(t, l.At(-1)) + assert.Equal(t, &a, l.At(0)) + assert.Equal(t, &b, l.At(1)) + assert.Nil(t, l.At(2)) +} + +func TestList_Index(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b} + + assert.Equal(t, -1, List[T](nil).Index(a)) + assert.Equal(t, 0, l.Index(a)) + assert.Equal(t, 1, l.Index(b)) + assert.Equal(t, -1, l.Index(c)) +} + +func TestList_Len(t *testing.T) { + a := New[T]() + b := New[T]() + l := List[T]{a, b} + + assert.Equal(t, 0, List[T](nil).Len()) + assert.Equal(t, 2, l.Len()) +} + +func TestList_Ref(t *testing.T) { + a := New[T]() + b := New[T]() + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).Ref()) + assert.Equal(t, &List[T]{a, b}, l.Ref()) +} + +func TestList_Refs(t *testing.T) { + a := New[T]() + b := New[T]() + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).Refs()) + 
assert.Equal(t, RefList[T]{&a, &b}, l.Refs()) +} + +func TestList_Delete(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b, c} + + assert.Nil(t, (List[T])(nil).Delete(b)) + assert.Equal(t, List[T]{a, c}, l.Delete(b)) + assert.Equal(t, List[T]{a, b, c}, l) +} + +func TestList_DeleteAt(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b, c} + + assert.Nil(t, (List[T])(nil).DeleteAt(1)) + assert.Equal(t, List[T]{a, c}, l.DeleteAt(1)) + assert.Equal(t, List[T]{a, b, c}, l) +} + +func TestList_Add(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b} + + assert.Equal(t, List[T]{a, b}, List[T](nil).Add(a, b)) + assert.Equal(t, List[T]{a, b, c, a}, l.Add(c, a)) + assert.Equal(t, List[T]{a, b}, l) +} + +func TestList_AddUniq(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b} + + assert.Equal(t, List[T]{a, b}, List[T](nil).AddUniq(a, b)) + assert.Equal(t, List[T]{a, b, c}, l.AddUniq(c, a)) + assert.Equal(t, List[T]{a, b}, l) +} + +func TestList_Insert(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b} + + assert.Equal(t, List[T]{a, b, c}, l.Insert(-1, c)) + assert.Equal(t, List[T]{c, a, b}, l.Insert(0, c)) + assert.Equal(t, List[T]{a, c, b}, l.Insert(1, c)) + assert.Equal(t, List[T]{a, b, c}, l.Insert(2, c)) + assert.Equal(t, List[T]{a, b, c}, l.Insert(3, c)) +} + +func TestList_Move(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b, c} + + assert.Nil(t, List[T](nil).Move(a, -1)) + assert.Equal(t, List[T]{c, a, b}, l.Move(c, 0)) + assert.Equal(t, List[T]{a, b, c}, l) + assert.Equal(t, List[T]{a, b}, l.Move(c, -1)) + assert.Equal(t, List[T]{c, a, b}, l.Move(c, 0)) + assert.Equal(t, List[T]{a, c, b}, l.Move(b, 10)) +} + +func TestList_MoveAt(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b, c} + + assert.Nil(t, List[T](nil).MoveAt(0, -1)) + 
assert.Equal(t, List[T]{c, a, b}, l.MoveAt(2, 0)) + assert.Equal(t, List[T]{a, b, c}, l) + assert.Equal(t, List[T]{a, b}, l.MoveAt(2, -1)) + assert.Equal(t, List[T]{c, a, b}, l.MoveAt(2, 0)) + assert.Equal(t, List[T]{a, c, b}, l.MoveAt(1, 10)) +} + +func TestList_Reverse(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b, c} + + assert.Nil(t, List[T](nil).Reverse()) + assert.Equal(t, List[T]{c, b, a}, l.Reverse()) + assert.Equal(t, List[T]{a, b, c}, l) +} + +func TestList_Concat(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b} + + assert.Equal(t, List[T]{a, c}, List[T](nil).Concat(List[T]{a, c})) + assert.Equal(t, List[T]{a, b, a, c}, l.Concat(List[T]{a, c})) + assert.Equal(t, List[T]{a, b}, l) +} + +func TestList_Intersect(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).Intersect(List[T]{c, a})) + assert.Equal(t, List[T]{a}, l.Intersect(List[T]{c, a})) + assert.Equal(t, List[T]{a, b}, l) +} + +func TestList_Strings(t *testing.T) { + a := New[T]() + b := New[T]() + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).Strings()) + assert.Equal(t, []string{a.String(), b.String()}, l.Strings()) +} + +func TestList_Clone(t *testing.T) { + a := New[T]() + b := New[T]() + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).Clone()) + assert.Equal(t, List[T]{a, b}, l.Clone()) + assert.NotSame(t, l, l.Clone()) +} + +func TestList_Sort(t *testing.T) { + a := New[T]() + b := New[T]() + l := List[T]{b, a, a} + + assert.Nil(t, List[T](nil).Sort()) + assert.Equal(t, List[T]{a, a, b}, l.Sort()) +} + +func TestList_Deref(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := RefList[T]{&b, &a, nil, &c} + + assert.Nil(t, RefList[T](nil).Deref()) + assert.Equal(t, List[T]{b, a, c}, l.Deref()) +} diff --git a/server/pkg/id/idx/set.go b/server/pkg/id/idx/set.go new file mode 100644 index 000000000..c7d899e76 --- /dev/null +++ 
b/server/pkg/id/idx/set.go @@ -0,0 +1,85 @@ +package idx + +type Set[T Type] struct { + l List[T] + m map[ID[T]]struct{} +} + +func NewSet[T Type](id ...ID[T]) *Set[T] { + s := &Set[T]{} + s.Add(id...) + return s +} + +func (s *Set[T]) Has(id ...ID[T]) bool { + if s == nil || s.m == nil { + return false + } + for _, i := range id { + if _, ok := s.m[i]; ok { + return true + } + } + return false +} + +func (s *Set[T]) List() List[T] { + if s == nil { + return nil + } + return s.l.Clone() +} + +func (s *Set[T]) Clone() *Set[T] { + if s == nil { + return nil + } + return NewSet(s.l...) +} + +func (s *Set[T]) Add(id ...ID[T]) { + if s == nil { + return + } + for _, i := range id { + if !s.Has(i) { + if s.m == nil { + s.m = map[ID[T]]struct{}{} + } + s.m[i] = struct{}{} + s.l = append(s.l, i) + } + } +} + +func (s *Set[T]) Merge(sets ...*Set[T]) { + if s == nil { + return + } + for _, t := range sets { + if t != nil { + s.Add(t.l...) + } + } +} + +func (s *Set[T]) Concat(sets ...*Set[T]) *Set[T] { + if s == nil { + return nil + } + ns := s.Clone() + ns.Merge(sets...) 
+ return ns +} + +func (s *Set[T]) Delete(id ...ID[T]) { + if s == nil { + return + } + for _, i := range id { + s.l = s.l.Delete(i) + if s.m != nil { + delete(s.m, i) + } + } +} diff --git a/server/pkg/id/idx/set_test.go b/server/pkg/id/idx/set_test.go new file mode 100644 index 000000000..8628a7f51 --- /dev/null +++ b/server/pkg/id/idx/set_test.go @@ -0,0 +1,86 @@ +package idx + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSet_NewSet(t *testing.T) { + a := New[T]() + assert.Equal(t, &Set[T]{ + l: nil, + m: nil, + }, NewSet[T]()) + assert.Equal(t, &Set[T]{ + l: List[T]{a}, + m: map[ID[T]]struct{}{ + a: {}, + }, + }, NewSet(a)) +} + +func TestSet_Has(t *testing.T) { + a := New[T]() + b := New[T]() + assert.False(t, (*Set[T])(nil).Has(a, b)) + assert.True(t, NewSet(a).Has(a, b)) + assert.False(t, NewSet(a).Has(b)) +} + +func TestSet_List(t *testing.T) { + a := New[T]() + b := New[T]() + assert.Nil(t, (*Set[T])(nil).List()) + assert.Nil(t, NewSet[T]().List()) + assert.Equal(t, List[T]{a, b}, NewSet(a, b).List()) +} + +func TestSet_Clone(t *testing.T) { + a := New[T]() + b := New[T]() + s := NewSet(a, b) + assert.Nil(t, (*Set[T])(nil).Clone()) + assert.Equal(t, &Set[T]{}, NewSet[T]().Clone()) + assert.Equal(t, s, s.Clone()) + assert.NotSame(t, s, s.Clone()) +} + +func TestSet_Add(t *testing.T) { + a := New[T]() + b := New[T]() + s := NewSet(a) + (*Set[T])(nil).Add(a, b) + s.Add(a, b) + assert.Equal(t, NewSet(a, b), s) +} + +func TestSet_Merge(t *testing.T) { + a := New[T]() + b := New[T]() + s := NewSet(a) + u := NewSet(a, b) + (*Set[T])(nil).Merge(u) + s.Merge(u) + assert.Equal(t, NewSet(a, b), s) +} + +func TestSet_Concat(t *testing.T) { + a := New[T]() + b := New[T]() + s := NewSet(a) + u := NewSet(a, b) + assert.Nil(t, (*Set[T])(nil).Concat(u)) + assert.Equal(t, NewSet(a, b), s.Concat(u)) + assert.Equal(t, NewSet(a), s) +} + +func TestSet_Delete(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + s := NewSet(a, b, c) 
+ (*Set[T])(nil).Delete(a, b) + s.Delete(a, b) + assert.Equal(t, NewSet(c), s) +} diff --git a/server/pkg/id/idx/string.go b/server/pkg/id/idx/string.go new file mode 100644 index 000000000..7b431d19b --- /dev/null +++ b/server/pkg/id/idx/string.go @@ -0,0 +1,38 @@ +package idx + +type StringID[T Type] string + +func StringIDFromRef[T Type](id *string) *StringID[T] { + if id == nil { + return nil + } + id2 := StringID[T](*id) + return &id2 +} + +func (id StringID[T]) Ref() *StringID[T] { + if id == "" { + return nil + } + return &id +} + +func (id *StringID[T]) CloneRef() *StringID[T] { + if id == nil { + return nil + } + id2 := *id + return &id2 +} + +func (id StringID[_]) String() string { + return string(id) +} + +func (id *StringID[_]) StringRef() *string { + if id == nil { + return nil + } + id2 := string(*id) + return &id2 +} diff --git a/server/pkg/id/idx/string_test.go b/server/pkg/id/idx/string_test.go new file mode 100644 index 000000000..cd6254d86 --- /dev/null +++ b/server/pkg/id/idx/string_test.go @@ -0,0 +1,29 @@ +package idx + +import ( + "testing" + + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestStringID_Ref(t *testing.T) { + assert.Equal(t, lo.ToPtr(StringID[T]("a")), StringID[T]("a").Ref()) +} + +func TestStringID_CloneRef(t *testing.T) { + s := lo.ToPtr(StringID[T]("a")) + res := s.CloneRef() + assert.Equal(t, s, res) + assert.NotSame(t, s, res) + assert.Nil(t, (*StringID[T])(nil).CloneRef()) +} + +func TestStringID_String(t *testing.T) { + assert.Equal(t, "a", StringID[T]("a").String()) +} + +func TestStringID_StringRef(t *testing.T) { + assert.Equal(t, lo.ToPtr("a"), lo.ToPtr(StringID[T]("a")).StringRef()) + assert.Nil(t, (*StringID[T])(nil).StringRef()) +} diff --git a/server/pkg/id/idx/ulid.go b/server/pkg/id/idx/ulid.go new file mode 100644 index 000000000..c93c0c54a --- /dev/null +++ b/server/pkg/id/idx/ulid.go @@ -0,0 +1,54 @@ +package idx + +import ( + "math/rand" + "sync" + "time" + + 
"github.com/oklog/ulid" + "github.com/reearth/reearth-backend/pkg/util" +) + +var ( + entropyLock sync.Mutex + // not safe for concurrent + entropy = ulid.Monotonic(rand.New(rand.NewSource(time.Now().UnixNano())), 0) +) + +func generateID() ulid.ULID { + entropyLock.Lock() + newID := ulid.MustNew(ulid.Timestamp(time.Now().UTC()), entropy) + entropyLock.Unlock() + return newID +} + +func generateAllID(n int) []ulid.ULID { + ids := make([]ulid.ULID, 0, n) + entropyLock.Lock() + for i := 0; i < n; i++ { + newID := ulid.MustNew(ulid.Timestamp(time.Now().UTC()), entropy) + ids = append(ids, newID) + } + entropyLock.Unlock() + return ids +} + +func parseID(id string) (parsedID ulid.ULID, e error) { + if includeUpperCase(id) { + return parsedID, ErrInvalidID + } + return ulid.Parse(id) +} + +func includeUpperCase(s string) bool { + for _, c := range s { + if 'A' <= c && c <= 'Z' { + return true + } + } + return false +} + +func mustParseID(id string) ulid.ULID { + return util.Must(parseID(id)) +} diff --git a/server/pkg/id/plugin.go b/server/pkg/id/plugin.go new file mode 100644 index 000000000..7b339215a --- /dev/null +++ b/server/pkg/id/plugin.go @@ -0,0 +1,258 @@ +package id + +import ( + "regexp" + "strings" + + "github.com/blang/semver" +) + +type PluginID struct { + name string + version string + sys bool + scene *SceneID +} + +// MUST NOT CHANGE +const ( + officialPluginIDStr = "reearth" + sepPluginID = "~" +) + +var ( + OfficialPluginID = PluginID{name: officialPluginIDStr, sys: true} + pluginNameRe = regexp.MustCompile("^[a-zA-Z0-9_-]+$") +) + +func validatePluginName(s string) bool { + if len(s) == 0 || len(s) > 100 || s == "reearth" || strings.Contains(s, "/") { + return false + } + return pluginNameRe.MatchString(s) +} + +func NewPluginID(name string, version string, scene *SceneID) (PluginID, error) { + if name == officialPluginIDStr { + // official plugin + return PluginID{name: name, sys: true}, nil + } + + if !validatePluginName(name) { + return 
PluginID{}, ErrInvalidID + } + + if _, err := semver.Parse(version); err != nil { + return PluginID{}, ErrInvalidID + } + + return PluginID{ + name: name, + version: version, + scene: scene.CopyRef(), + }, nil +} + +// PluginIDFrom generates a new id.PluginID from a string. +func PluginIDFrom(id string) (PluginID, error) { + if id == officialPluginIDStr { + // official plugin + return PluginID{name: id, sys: true}, nil + } + + var name, version string + var sceneID *SceneID + + ids := strings.SplitN(id, sepPluginID, 3) + switch len(ids) { + case 2: + name = ids[0] + version = ids[1] + case 3: + sceneID2, err := SceneIDFrom(ids[0]) + if err != nil { + return PluginID{}, ErrInvalidID + } + sceneID = &sceneID2 + name = ids[1] + version = ids[2] + default: + return PluginID{}, ErrInvalidID + } + + return NewPluginID(name, version, sceneID) +} + +// MustPluginID generates a new id.PluginID from a string, but panics if the string cannot be parsed. +func MustPluginID(id string) PluginID { + did, err := PluginIDFrom(id) + if err != nil { + panic(err) + } + return did +} + +// PluginIDFromRef generates a new id.PluginID from a string ref. +func PluginIDFromRef(id *string) *PluginID { + if id == nil { + return nil + } + did, err := PluginIDFrom(*id) + if err != nil { + return nil + } + return &did +} + +func (d PluginID) WithScene(sid *SceneID) PluginID { + return PluginID{ + name: d.name, + version: d.version, + sys: d.sys, + scene: sid.CopyRef(), + } +} + +// Clone duplicates the PluginID +func (d PluginID) Clone() PluginID { + return PluginID{ + name: d.name, + version: d.version, + sys: d.sys, + scene: d.scene.CopyRef(), + } +} + +// IsNil checks if ID is empty or not. +func (d PluginID) IsNil() bool { + return d.name == "" && d.version == "" && d.scene == nil && !d.sys +} + +// Name returns a name. +func (d PluginID) Name() string { + return d.name +} + +// Version returns a version. 
+func (d PluginID) Version() semver.Version { + if d.version == "" { + return semver.Version{} + } + v, err := semver.Parse(d.version) + if err != nil { + return semver.Version{} + } + return v +} + +// System returns if the ID is built-in. +func (d PluginID) System() bool { + return d.sys +} + +// Scene returns a scene ID of the plugin. It indicates this plugin is private and available for only the specific scene. +func (d PluginID) Scene() *SceneID { + return d.scene.CopyRef() +} + +// Validate returns true if id is valid. +func (d PluginID) Validate() bool { + if d.sys { + return true + } + return validatePluginName(d.name) +} + +// String returns a string representation. +func (d PluginID) String() (s string) { + if d.sys { + return d.name + } + if d.scene != nil { + s = d.scene.String() + sepPluginID + } + s += d.name + sepPluginID + d.version + return +} + +// Ref returns a reference. +func (d PluginID) Ref() *PluginID { + d2 := d + return &d2 +} + +func (d *PluginID) CopyRef() *PluginID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// StringRef returns a reference of a string representation. +func (d *PluginID) StringRef() *string { + if d == nil { + return nil + } + id := (*d).String() + return &id +} + +// Equal returns true if two IDs are equal. +func (d PluginID) Equal(d2 PluginID) bool { + if d.sys { + return d2.sys + } + if d.scene != nil { + if d2.scene == nil || *d.scene != *d2.scene { + return false + } + } + return d.name == d2.name && d.version == d2.version +} + +// NameEqual returns true if names of two IDs are equal. 
+func (d PluginID) NameEqual(d2 PluginID) bool { + if d.sys { + return d2.sys + } + if d.scene != nil { + if d2.scene == nil || *d.scene != *d2.scene { + return false + } + } + return d.name == d2.name +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *PluginID) MarshalText() ([]byte, error) { + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *PluginID) UnmarshalText(text []byte) (err error) { + *d, err = PluginIDFrom(string(text)) + return +} + +// PluginIDToKeys converts IDs into a string slice. +func PluginIDsToStrings(ids []PluginID) []string { + keys := make([]string, 0, len(ids)) + for _, id := range ids { + keys = append(keys, id.String()) + } + return keys +} + +// PluginIDsFrom converts a string slice into a ID slice. +func PluginIDsFrom(ids []string) ([]PluginID, error) { + dids := make([]PluginID, 0, len(ids)) + for _, id := range ids { + did, err := PluginIDFrom(id) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} diff --git a/server/pkg/id/plugin_test.go b/server/pkg/id/plugin_test.go new file mode 100644 index 000000000..7a9a24694 --- /dev/null +++ b/server/pkg/id/plugin_test.go @@ -0,0 +1,779 @@ +package id + +import ( + "encoding" + "strings" + "testing" + + "github.com/blang/semver" + "github.com/stretchr/testify/assert" +) + +var _ encoding.TextMarshaler = (*PluginID)(nil) +var _ encoding.TextUnmarshaler = (*PluginID)(nil) + +func TestPluginIDValidator(t *testing.T) { + tests := []struct { + name string + input string + expected bool + }{ + { + name: "accepted name", + input: "1cc1_c-d", + expected: true, + }, + { + name: "les then 100", + input: strings.Repeat("a", 100), + expected: true, + }, + { + name: "empty", + input: "", + expected: false, + }, + { + name: "spaces", + input: " ", + expected: false, + }, + { + name: "contains not accepted characters", + input: "@bbb/aa-a_a", + expected: false, + }, + { + name: 
"contain space", + input: "bbb a", + expected: false, + }, + { + name: "contain =", + input: "cccd=", + expected: false, + }, + { + name: "contains reearth reserved key word", + input: "reearth", + expected: false, + }, + { + name: "more than 100 char", + input: strings.Repeat("a", 101), + expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, validatePluginName(tc.input)) + }) + } +} + +func TestNewPluginID(t *testing.T) { + tests := []struct { + name string + pluginName string + version string + scene *SceneID + expected PluginID + expectedError bool + }{ + { + name: "success:accepted name", + pluginName: "1ccc1_c-d", + version: "1.0.0", + scene: nil, + expected: PluginID{ + name: "1ccc1_c-d", + version: "1.0.0", + sys: false, + scene: nil, + }, + }, + { + name: "success:with scene id", + pluginName: "aaaaa", + version: "0.1.0", + scene: MustSceneID("01fbpdqax0ttrftj3gb5gm4rw7").Ref(), + expected: PluginID{ + name: "aaaaa", + version: "0.1.0", + sys: false, + scene: MustSceneID("01fbpdqax0ttrftj3gb5gm4rw7").Ref(), + }, + }, + { + name: "success:official plugin id", + pluginName: officialPluginIDStr, + expected: PluginID{ + name: officialPluginIDStr, + version: "", + sys: true, + scene: nil, + }, + }, + { + name: "fail:invalid name1", + pluginName: "1cc1_c-d", + version: "", + scene: nil, + expectedError: true, + }, + { + name: "fail:invalid name2", + pluginName: "1cc1_c-d/?s", + version: "1.0.0", + scene: nil, + expectedError: true, + }, + { + name: "fail:invalid name3", + pluginName: "1cc1_c-d/?s", + version: "_1", + scene: nil, + expectedError: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + result, err := NewPluginID(tc.pluginName, tc.version, tc.scene) + if tc.expectedError { + assert.Error(t, err) + } else { + assert.Equal(t, tc.expected, result) + } + }) + } +} + +func TestPluginIDFrom(t *testing.T) 
{ + tests := []struct { + name string + input string + expected PluginID + expectedError bool + }{ + { + name: "success:accepted name", + input: "1cc1_c-d~1.0.0", + expected: PluginID{ + name: "1cc1_c-d", + version: "1.0.0", + sys: false, + scene: nil, + }, + }, + { + name: "success:with scene id", + input: "01fbpdqax0ttrftj3gb5gm4rw7~aaaaa~0.1.0", + expected: PluginID{ + name: "aaaaa", + version: "0.1.0", + sys: false, + scene: MustSceneID("01fbpdqax0ttrftj3gb5gm4rw7").Ref(), + }, + }, + { + name: "success:official plugin id", + input: officialPluginIDStr, + expected: PluginID{ + name: officialPluginIDStr, + version: "", + sys: true, + scene: nil, + }, + }, + { + name: "fail:invalid name1", + input: "1cc1_c-d", + expectedError: true, + }, + { + name: "fail:invalid name2", + input: "1cc1_c-d/?s~1.0.0", + expectedError: true, + }, + { + name: "fail:invalid name3", + input: "1cc1_c-d/?s~1", + expectedError: true, + }, + { + name: "fail:invalid scene id", + input: "xxxx~ssss~1.0.0", + expectedError: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + result, err := PluginIDFrom(tc.input) + if tc.expectedError { + assert.Error(t, err) + } else { + assert.Equal(t, tc.expected, result) + } + }) + } +} + +func TestMustPluginID(t *testing.T) { + tests := []struct { + name string + input string + expected PluginID + expectedError bool + }{ + { + name: "success:accepted name", + input: "1cc1_c-d~1.0.0", + expected: PluginID{ + name: "1cc1_c-d", + version: "1.0.0", + sys: false, + }, + }, + { + name: "fail:invalid name", + input: "1cc.1_c-d", + expectedError: true, + }, + { + name: "fail:invalid name2", + input: "1cc.1_c-d/?s~1.0.0", + expectedError: true, + }, + { + name: "fail:invalid name3", + input: "1cc.1_c-d/?s~1", + expectedError: true, + }, + { + name: "fail:invalid scene id", + input: "xxxx~ssss~1.0.0", + expectedError: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t 
*testing.T) { + t.Parallel() + if tc.expectedError { + assert.Panics(t, func() { + _ = MustPluginID(tc.input) + }) + } else { + result := MustPluginID(tc.input) + assert.Equal(t, tc.expected, result) + } + }) + } +} + +func TestPluginIDFromRef(t *testing.T) { + tests := []struct { + name string + input string + expected *PluginID + }{ + { + name: "success:accepted name", + input: "1cc1_c-d~1.0.0", + expected: &PluginID{ + name: "1cc1_c-d", + version: "1.0.0", + sys: false, + }, + }, + { + name: "fail:invalid name1", + input: "1cc1_c-d", + }, + { + name: "fail:invalid name2", + input: "1cc1_c-d/?s~1.0.0", + }, + { + name: "fail:invalid name3", + input: "1cc1_c-d/?s~1", + }, + { + name: "fail:invalid scene id", + input: "xxxx~ssss~1.0.0", + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + if tc.expected == nil { + result := PluginIDFromRef(&tc.input) + assert.Nil(t, result) + } else { + result := PluginIDFromRef(&tc.input) + assert.Equal(t, *tc.expected, *result) + } + }) + } +} + +func TestPluginID_WithScene(t *testing.T) { + sid := NewSceneID().Ref() + + assert.Equal(t, PluginID{ + name: "aaa", + version: "1.0.0", + sys: false, + scene: sid, + }, PluginID{ + name: "aaa", + version: "1.0.0", + sys: false, + scene: nil, + }.WithScene(sid)) +} + +func TestPluginID_Clone(t *testing.T) { + p := PluginID{ + name: "aaa", + version: "1.0.0", + sys: false, + scene: NewSceneID().Ref(), + } + c := p.Clone() + + assert.Equal(t, p, c) + assert.NotSame(t, p, c) +} + +func TestPluginID_Name(t *testing.T) { + plugin := MustPluginID("MyPlugin~1.0.0") + + assert.Equal(t, "MyPlugin", plugin.Name()) +} + +func TestPluginID_Version(t *testing.T) { + plugin := MustPluginID("MyPlugin~1.0.0") + + assert.Equal(t, semver.MustParse("1.0.0"), plugin.Version()) +} + +func TestPluginID_Scene(t *testing.T) { + scene := MustSceneID("01fbpdqax0ttrftj3gb5gm4rw7") + sid := PluginID{ + scene: &scene, + }.Scene() + assert.Equal(t, scene, *sid) + 
assert.NotSame(t, scene, *sid) +} + +func TestPluginID_System(t *testing.T) { + plugin := MustPluginID("MyPlugin~1.0.0") + + assert.False(t, plugin.System()) + + plugin = MustPluginID(officialPluginIDStr) + + assert.True(t, plugin.System()) +} + +func TestPluginID_Validate(t *testing.T) { + tests := []struct { + name string + input PluginID + expected bool + }{ + { + name: "success:accepted name", + input: PluginID{ + name: "1cc1_c-d", + version: "1.0.0", + sys: false, + }, + expected: true, + }, + { + name: "success:accepted name", + input: PluginID{ + name: "1cc1/?_c-d", + version: "1.0.0", + sys: false, + }, + expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + assert.Equal(t, tc.expected, tc.input.Validate()) + }) + } +} + +func TestPluginID_String(t *testing.T) { + tests := []struct { + name string + input PluginID + expected string + }{ + { + name: "accepted name1", + input: PluginID{ + name: "ppl", + version: "1.0.0", + scene: nil, + sys: false, + }, + expected: "ppl~1.0.0", + }, + { + name: "accepted name2", + input: PluginID{ + name: "plg", + version: "2.1.0-beta", + scene: nil, + sys: false, + }, + expected: "plg~2.1.0-beta", + }, + { + name: "with scene id", + input: PluginID{ + name: "plg", + version: "2.1.0-beta", + scene: MustSceneID("01fbpdqax0ttrftj3gb5gm4rw7").Ref(), + sys: false, + }, + expected: "01fbpdqax0ttrftj3gb5gm4rw7~plg~2.1.0-beta", + }, + { + name: "system", + input: PluginID{ + name: "reearth", + version: "", + scene: nil, + sys: true, + }, + expected: "reearth", + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + assert.Equal(t, tc.expected, tc.input.String()) + }) + } +} + +func TestPluginID_Ref(t *testing.T) { + pluginID := MustPluginID("Test~1.0.0") + + ref := pluginID.Ref() + + assert.Equal(t, *ref, pluginID) +} + +func TestPluginID_CopyRef(t *testing.T) { + pluginID := MustPluginID("Test~1.0.0") + + ref := pluginID.Ref() + + ref2 := 
ref.CopyRef() + + assert.Equal(t, *ref, pluginID) + assert.Equal(t, *ref2, pluginID) + assert.Equal(t, *ref, *ref2) +} + +func TestPluginID_StringRef(t *testing.T) { + pluginID := MustPluginID("Test~1.0.0") + + ref := pluginID.Ref() + + strRef := ref.StringRef() + + assert.Equal(t, pluginID.String(), *strRef) +} + +func TestPluginID_Equal(t *testing.T) { + tests := []struct { + name string + input1 PluginID + input2 PluginID + expected bool + }{ + { + name: "system", + input1: MustPluginID("reearth"), + input2: MustPluginID("reearth"), + expected: true, + }, + { + name: "system and normal", + input1: MustPluginID("reearth"), + input2: MustPluginID("Test~1.0.0"), + expected: false, + }, + { + name: "same", + input1: MustPluginID("Test~1.0.0"), + input2: MustPluginID("Test~1.0.0"), + expected: true, + }, + { + name: "diff version", + input1: MustPluginID("Test~1.0.0"), + input2: MustPluginID("Test~1.0.1"), + expected: false, + }, + { + name: "diff name", + input1: MustPluginID("Test0~1.0.0"), + input2: MustPluginID("Test1~1.0.0"), + expected: false, + }, + { + name: "same scene", + input1: MustPluginID("01fbprc3j929w0a3h16nh8rqy6~Test~1.0.0"), + input2: MustPluginID("01fbprc3j929w0a3h16nh8rqy6~Test~1.0.0"), + expected: true, + }, + { + name: "diff scene", + input1: MustPluginID("01fbprc3j929w0a3h16nh8rqy6~Test~1.0.0"), + input2: MustPluginID("01fbprc3j929w0a3h16nh8rqy7~Test~1.0.0"), + expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input1.Equal(tc.input2)) + assert.Equal(t, tc.expected, tc.input2.Equal(tc.input1)) + }) + } +} + +func TestPluginID_NameEqual(t *testing.T) { + tests := []struct { + name string + input1 PluginID + input2 PluginID + expected bool + }{ + { + name: "system", + input1: MustPluginID("reearth"), + input2: MustPluginID("reearth"), + expected: true, + }, + { + name: "system and normal", + input1: MustPluginID("reearth"), + input2: 
MustPluginID("Test~1.0.0"), + expected: false, + }, + { + name: "same", + input1: MustPluginID("Test~1.0.0"), + input2: MustPluginID("Test~1.0.0"), + expected: true, + }, + { + name: "diff version", + input1: MustPluginID("Test~1.0.0"), + input2: MustPluginID("Test~1.0.1"), + expected: true, + }, + { + name: "diff name", + input1: MustPluginID("Test0~1.0.0"), + input2: MustPluginID("Test1~1.0.0"), + expected: false, + }, + { + name: "same scene", + input1: MustPluginID("01fbprc3j929w0a3h16nh8rqy6~Test~1.0.0"), + input2: MustPluginID("01fbprc3j929w0a3h16nh8rqy6~Test~1.0.0"), + expected: true, + }, + { + name: "diff scene", + input1: MustPluginID("01fbprc3j929w0a3h16nh8rqy6~Test~1.0.0"), + input2: MustPluginID("01fbprc3j929w0a3h16nh8rqy7~Test~1.0.0"), + expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input1.NameEqual(tc.input2)) + assert.Equal(t, tc.expected, tc.input2.NameEqual(tc.input1)) + }) + } +} + +func TestPluginID_MarshalText(t *testing.T) { + pluginIdRef := MustPluginID("Test~1.0.0").Ref() + + res, err := pluginIdRef.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte("Test~1.0.0"), res) +} + +func TestPluginID_UnmarshalText(t *testing.T) { + text := []byte("Test~1.0.0") + + pluginId := &PluginID{} + + err := pluginId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "Test~1.0.0", pluginId.String()) + +} + +func TestPluginIDsToStrings(t *testing.T) { + tests := []struct { + name string + input []PluginID + expected []string + }{ + { + name: "Empty slice", + input: make([]PluginID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []PluginID{MustPluginID("Test~1.0.0")}, + expected: []string{"Test~1.0.0"}, + }, + { + name: "multiple elements", + input: []PluginID{ + MustPluginID("Test~1.0.0"), + MustPluginID("Test~1.0.1"), + MustPluginID("Test~1.0.2"), + }, + expected: []string{ + "Test~1.0.0", + "Test~1.0.1", 
+ "Test~1.0.2", + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, PluginIDsToStrings(tc.input)) + }) + } +} + +func TestPluginIDsFrom(t *testing.T) { + tests := []struct { + name string + input []string + expected []PluginID + err error + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: []PluginID{}, + }, + { + name: "1 element", + input: []string{"Test~1.0.0"}, + expected: []PluginID{MustPluginID("Test~1.0.0")}, + }, + { + name: "multiple elements", + input: []string{ + "Test~1.0.0", + "Test~1.0.1", + "Test~1.0.2", + }, + expected: []PluginID{ + MustPluginID("Test~1.0.0"), + MustPluginID("Test~1.0.1"), + MustPluginID("Test~1.0.2"), + }, + }, + { + name: "invalid element", + input: []string{ + "Test~1.0.0", + "Test~1.0.1", + "Test", + }, + expected: nil, + err: ErrInvalidID, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + res, err := PluginIDsFrom(tt.input) + if tt.err != nil { + assert.Nil(t, res) + assert.Equal(t, tt.err, err) + } else { + assert.Equal(t, tt.expected, res) + assert.Nil(t, err) + } + }) + } +} + +func TestPluginID_IsNil(t *testing.T) { + tests := []struct { + name string + target PluginID + want bool + }{ + { + name: "present", + target: PluginID{name: "a"}, + want: false, + }, + { + name: "empty", + target: PluginID{}, + want: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.IsNil()) + }) + } +} diff --git a/server/pkg/id/property_schema.go b/server/pkg/id/property_schema.go new file mode 100644 index 000000000..8ba76ff2f --- /dev/null +++ b/server/pkg/id/property_schema.go @@ -0,0 +1,159 @@ +package id + +import ( + "regexp" + "strings" +) + +var propertySchemaIDRe = regexp.MustCompile("^[a-zA-Z0-9][a-zA-Z0-9_-]*$|^@$") + +type PropertySchemaID struct { + plugin PluginID + id 
string +} + +// NewPropertySchemaID generates a new PropertySchemaID from a plugin ID and name. +func NewPropertySchemaID(p PluginID, name string) PropertySchemaID { + if p.IsNil() || !propertySchemaIDRe.MatchString(name) { + return PropertySchemaID{} + } + return PropertySchemaID{plugin: p.Clone(), id: name} +} + +// PropertySchemaIDFrom generates a new PropertySchemaID from a string. +func PropertySchemaIDFrom(id string) (PropertySchemaID, error) { + ids := strings.SplitN(id, "/", 2) + if len(ids) < 2 || !propertySchemaIDRe.MatchString(ids[len(ids)-1]) { + return PropertySchemaID{}, ErrInvalidID + } + pid, err := PluginIDFrom(ids[0]) + if err != nil { + return PropertySchemaID{}, ErrInvalidID + } + return PropertySchemaID{plugin: pid, id: ids[1]}, nil +} + +// PropertySchemaIDFromRef generates a new PropertySchemaID from a string ref. +func PropertySchemaIDFromRef(id *string) *PropertySchemaID { + if id == nil { + return nil + } + did, err := PropertySchemaIDFrom(*id) + if err != nil { + return nil + } + return &did +} + +// MustPropertySchemaID generates a new PropertySchemaID from a string, but panics if the string cannot be parsed. +func MustPropertySchemaID(id string) PropertySchemaID { + did, err := PropertySchemaIDFrom(id) + if err != nil { + panic(err) + } + return did +} + +// Clone duplicates the PropertySchemaID +func (d PropertySchemaID) Clone() PropertySchemaID { + return PropertySchemaID{ + plugin: d.plugin.Clone(), + id: d.id, + } +} + +// WithPlugin duplicates the PropertySchemaID but its plugin ID is changed +func (d PropertySchemaID) WithPlugin(plugin PluginID) PropertySchemaID { + return PropertySchemaID{ + plugin: plugin.Clone(), + id: d.id, + } +} + +// ID returns a fragment of just ID. +func (d PropertySchemaID) ID() string { + return d.id +} + +// Plugin returns a fragment of plugin ID. +func (d PropertySchemaID) Plugin() PluginID { + return d.plugin +} + +// String returns a string representation. 
+func (d PropertySchemaID) String() string { + if d.IsNil() { + return "" + } + return d.plugin.String() + "/" + d.id +} + +// Ref returns a reference. +func (d PropertySchemaID) Ref() *PropertySchemaID { + if d.IsNil() { + return nil + } + d2 := d + return &d2 +} + +// CopyRef returns a copy of a reference. +func (d *PropertySchemaID) CopyRef() *PropertySchemaID { + if d == nil || d.IsNil() { + return nil + } + d2 := *d + return &d2 +} + +// IsNil checks if ID is empty or not. +func (d PropertySchemaID) IsNil() bool { + return d.plugin.IsNil() && d.id == "" +} + +// Equal returns true if two IDs are equal. +func (d PropertySchemaID) Equal(d2 PropertySchemaID) bool { + return d.plugin.Equal(d2.plugin) && d.id == d2.id +} + +// StringRef returns a reference of a string representation. +func (d *PropertySchemaID) StringRef() *string { + if d == nil { + return nil + } + id := d.String() + return &id +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *PropertySchemaID) MarshalText() ([]byte, error) { + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *PropertySchemaID) UnmarshalText(text []byte) (err error) { + *d, err = PropertySchemaIDFrom(string(text)) + return +} + +// PropertySchemaIDToKeys converts IDs into a string slice. +func PropertySchemaIDsToStrings(ids []PropertySchemaID) []string { + keys := make([]string, 0, len(ids)) + for _, id := range ids { + keys = append(keys, id.String()) + } + return keys +} + +// PropertySchemaIDsFrom converts a string slice into a ID slice. 
+func PropertySchemaIDsFrom(ids []string) ([]PropertySchemaID, error) { + dids := make([]PropertySchemaID, 0, len(ids)) + for _, id := range ids { + did, err := PropertySchemaIDFrom(id) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} diff --git a/server/pkg/id/property_schema_test.go b/server/pkg/id/property_schema_test.go new file mode 100644 index 000000000..f604b26ac --- /dev/null +++ b/server/pkg/id/property_schema_test.go @@ -0,0 +1,423 @@ +package id + +import ( + "encoding" + "testing" + + "github.com/stretchr/testify/assert" +) + +var _ encoding.TextMarshaler = (*PropertySchemaID)(nil) +var _ encoding.TextUnmarshaler = (*PropertySchemaID)(nil) + +func TestNewPropertySchemaID(t *testing.T) { + pluginID := MustPluginID("test~2.0.0") + pluginExtensionID := "test2" + propertySchemaID := NewPropertySchemaID(pluginID, pluginExtensionID) + + assert.NotNil(t, propertySchemaID) + assert.Equal(t, PropertySchemaID{ + plugin: MustPluginID("test~2.0.0"), + id: "test2", + }, propertySchemaID) + + assert.Equal(t, PropertySchemaID{}, NewPropertySchemaID(PluginID{}, "a")) + assert.Equal(t, PropertySchemaID{}, NewPropertySchemaID(pluginID, "")) +} + +func TestPropertySchemaIDFrom(t *testing.T) { + tests := []struct { + name string + input string + expected struct { + result PropertySchemaID + err error + } + }{ + { + name: "success", + input: "test~1.0.0/Test_Test-01", + expected: struct { + result PropertySchemaID + err error + }{ + result: PropertySchemaID{ + plugin: MustPluginID("test~1.0.0"), + id: "Test_Test-01", + }, + err: nil, + }, + }, + { + name: "success: @", + input: "test~1.0.0/@", + expected: struct { + result PropertySchemaID + err error + }{ + result: PropertySchemaID{ + plugin: MustPluginID("test~1.0.0"), + id: "@", + }, + err: nil, + }, + }, + { + name: "fail 1", + input: "Test", + expected: struct { + result PropertySchemaID + err error + }{result: PropertySchemaID{}, err: ErrInvalidID}, + }, + { + name: "fail 
2", + input: "Test/+dsad", + expected: struct { + result PropertySchemaID + err error + }{result: PropertySchemaID{}, err: ErrInvalidID}, + }, + { + name: "fail 3", + input: "Test/-", + expected: struct { + result PropertySchemaID + err error + }{result: PropertySchemaID{}, err: ErrInvalidID}, + }, + { + name: "fail 4", + input: "Test/__", + expected: struct { + result PropertySchemaID + err error + }{result: PropertySchemaID{}, err: ErrInvalidID}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := PropertySchemaIDFrom(tt.input) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.result, result) + assert.Equal(t, tt.expected.err, err) + } else { + assert.Equal(t, tt.expected.result, result) + assert.Nil(t, err) + } + }) + } +} + +func TestMustPropertySchemaID(t *testing.T) { + tests := []struct { + name string + input string + expected struct { + result PropertySchemaID + err error + } + }{ + { + name: "success:valid name", + input: "test~1.0.0/Test_Test-01", + expected: struct { + result PropertySchemaID + err error + }{ + result: PropertySchemaID{ + plugin: MustPluginID("test~1.0.0"), + id: "Test_Test-01", + }, + err: nil, + }, + }, + { + name: "fail:invalid name", + input: "Test", + expected: struct { + result PropertySchemaID + err error + }{result: PropertySchemaID{}, err: ErrInvalidID}, + }, + { + name: "fail:invalid name", + input: "Test/+dsad", + expected: struct { + result PropertySchemaID + err error + }{result: PropertySchemaID{}, err: ErrInvalidID}, + }, + { + name: "fail:invalid name", + input: "Test/dsa d", + expected: struct { + result PropertySchemaID + err error + }{result: PropertySchemaID{}, err: ErrInvalidID}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.expected.err != nil { + assert.Panics(t, func() { + _ = MustPropertySchemaID(tt.input) + }) + } else { + result := MustPropertySchemaID(tt.input) + 
assert.Equal(t, tt.expected.result, result) + } + }) + } +} + +func TestPropertySchemaIDFromRef(t *testing.T) { + tests := []struct { + name string + input string + expected *PropertySchemaID + }{ + { + name: "success:valid name", + input: "test~1.0.0/Test_Test-01", + expected: &PropertySchemaID{ + plugin: MustPluginID("test~1.0.0"), + id: "Test_Test-01", + }, + }, + { + name: "fail:invalid name 1", + input: "Test~1.0.0", + expected: nil, + }, + { + name: "fail:invalid name 2", + input: "Test~1.0.0/+dsad", + expected: nil, + }, + { + name: "fail:invalid name 3", + input: "Test~1.0.0/dsa d", + expected: nil, + }, + { + name: "fail:invalid name 4", + input: "Test/dsa", + expected: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + result := PropertySchemaIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestPropertySchemaID_Clone(t *testing.T) { + p := PropertySchemaID{ + id: "xxx", + plugin: PluginID{ + name: "aaa", + version: "1.0.0", + sys: false, + scene: NewSceneID().Ref(), + }, + } + c := p.Clone() + + assert.Equal(t, p, c) + assert.NotSame(t, p, c) +} + +func TestPropertySchemaID_WithPlugin(t *testing.T) { + c := PropertySchemaID{ + id: "xxx", + plugin: PluginID{ + name: "aaa", + version: "1.0.0", + }, + }.WithPlugin(PluginID{ + name: "aaa", + version: "1.1.0", + }) + + assert.Equal(t, PropertySchemaID{ + id: "xxx", + plugin: PluginID{ + name: "aaa", + version: "1.1.0", + }, + }, c) +} + +func TestPropertySchemaID_ID(t *testing.T) { + propertySchemaID := MustPropertySchemaID("Test~2.0.0/test") + assert.Equal(t, propertySchemaID.ID(), "test") +} + +func TestPropertySchemaID_Plugin(t *testing.T) { + propertySchemaID := MustPropertySchemaID("Test~2.0.0/test") + assert.Equal(t, MustPluginID("Test~2.0.0"), propertySchemaID.Plugin()) +} + +func TestPropertySchemaID_String(t *testing.T) { + propertySchemaID := MustPropertySchemaID("Test~2.0.0/test") + assert.Equal(t, 
propertySchemaID.String(), "Test~2.0.0/test") +} + +func TestPropertySchemaID_Ref(t *testing.T) { + propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") + assert.Equal(t, &propertySchemaID, propertySchemaID.Ref()) +} + +func TestPropertySchemaID_CopyRef(t *testing.T) { + propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") + assert.Equal(t, propertySchemaID, *propertySchemaID.CopyRef()) + assert.NotSame(t, propertySchemaID.Ref(), propertySchemaID.CopyRef()) +} + +func TestPropertySchemaID_IsNil(t *testing.T) { + propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") + assert.False(t, propertySchemaID.IsNil()) + propertySchemaID = PropertySchemaID{} + assert.True(t, propertySchemaID.IsNil()) +} + +func TestPropertySchemaID_Equal(t *testing.T) { + propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") + propertySchemaID2, _ := PropertySchemaIDFrom("test~2.0.0/test") + propertySchemaID3, _ := PropertySchemaIDFrom("test~2.0.1/test") + assert.True(t, propertySchemaID.Equal(propertySchemaID2)) + assert.False(t, propertySchemaID.Equal(propertySchemaID3)) +} + +func TestPropertySchemaID_StringRef(t *testing.T) { + propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") + ref := &propertySchemaID + assert.Equal(t, *ref.StringRef(), ref.String()) +} + +func TestPropertySchemaID_MarshalText(t *testing.T) { + propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") + res, err := propertySchemaID.MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte("test~2.0.0/test"), res) +} + +func TestPropertySchemaID_UnmarshalText(t *testing.T) { + text := []byte("test~2.0.0/test") + propertySchemaID := &PropertySchemaID{} + err := propertySchemaID.UnmarshalText(text) + assert.Nil(t, err) + assert.Equal(t, "test~2.0.0/test", propertySchemaID.String()) +} + +func TestPropertySchemaIDsToStrings(t *testing.T) { + tests := []struct { + name string + input []PropertySchemaID + expected []string + }{ + { + name: "Empty slice", + input: 
make([]PropertySchemaID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []PropertySchemaID{MustPropertySchemaID("test~2.0.0/test")}, + expected: []string{"test~2.0.0/test"}, + }, + { + name: "multiple elements", + input: []PropertySchemaID{ + MustPropertySchemaID("Test~1.0.0/test"), + MustPropertySchemaID("Test~1.0.1/test"), + MustPropertySchemaID("Test~1.0.2/test"), + }, + expected: []string{ + "Test~1.0.0/test", + "Test~1.0.1/test", + "Test~1.0.2/test", + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, PropertySchemaIDsToStrings(tt.input)) + }) + } + +} + +func TestPropertySchemaIDsFrom(t *testing.T) { + tests := []struct { + name string + input []string + expected []PropertySchemaID + err error + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: []PropertySchemaID{}, + }, + { + name: "1 element", + input: []string{"Test~1.0.0/test"}, + expected: []PropertySchemaID{MustPropertySchemaID("Test~1.0.0/test")}, + }, + { + name: "multiple elements", + input: []string{ + "Test~1.0.0/test", + "Test~1.0.1/test", + "Test~1.0.2/test", + }, + expected: []PropertySchemaID{ + MustPropertySchemaID("Test~1.0.0/test"), + MustPropertySchemaID("Test~1.0.1/test"), + MustPropertySchemaID("Test~1.0.2/test"), + }, + }, + { + name: "invalid elements", + input: []string{ + "Test~1.0.0/test", + "Test~1.0.1/test", + "Test~1.0.2", + }, + err: ErrInvalidID, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + res, err := PropertySchemaIDsFrom(tt.input) + if tt.err != nil { + assert.Nil(t, res) + assert.Equal(t, tt.err, err) + } else { + assert.Equal(t, tt.expected, res) + assert.Nil(t, err) + } + }) + } +} diff --git a/server/pkg/kml/kml.go b/server/pkg/kml/kml.go new file mode 100644 index 000000000..0422cc9e8 --- /dev/null +++ b/server/pkg/kml/kml.go @@ -0,0 +1,68 @@ +package kml + +import ( + 
"github.com/reearth/reearth-backend/pkg/property" +) + +type Collection struct { + Folders []Collection `xml:"Folder"` + Placemarks []Placemark `xml:"Placemark"` + Styles []Style `xml:"Style"` + Name string `xml:"name"` +} +type Placemark struct { + Point Point `xml:"Point"` + Polygon Polygon `xml:"Polygon"` + Polyline LineString `xml:"LineString"` + Name string `xml:"name"` + StyleUrl string `xml:"styleUrl"` +} +type BoundaryIs struct { + LinearRing LinearRing `xml:"LinearRing"` +} +type LinearRing struct { + Coordinates string `xml:"coordinates"` +} +type Point struct { + Coordinates string `xml:"coordinates"` +} + +type Polygon struct { + OuterBoundaryIs BoundaryIs `xml:"outerBoundaryIs"` + InnerBoundaryIs []BoundaryIs `xml:"innerBoundaryIs"` +} +type LineString struct { + Coordinates string `xml:"coordinates"` +} +type PointFields struct { + Latlng *property.LatLng + Height *float64 +} + +type IconStyle struct { + Icon *Icon `xml:"Icon"` + Color string `xml:"color"` + Scale float64 `xml:"scale"` +} +type Icon struct { + Href string `xml:"href"` +} + +// Marker Styling +type Style struct { + Id string `xml:"id,attr"` + IconStyle IconStyle `xml:"IconStyle"` + LineStyle LineStyle `xml:"LineStyle"` + PolyStyle PolyStyle `xml:"PolyStyle"` +} + +// Polyline Styling +type LineStyle struct { + Color string `xml:"color"` + Width float64 `xml:"width"` +} +type PolyStyle struct { + Color string `xml:"color"` + Fill bool `xml:"fill"` + Stroke bool `xml:"outline"` +} diff --git a/server/pkg/layer/builder.go b/server/pkg/layer/builder.go new file mode 100644 index 000000000..21612b6f4 --- /dev/null +++ b/server/pkg/layer/builder.go @@ -0,0 +1,74 @@ +package layer + +type Builder struct { + base layerBase +} + +func New() *Builder { + return &Builder{base: layerBase{visible: true}} +} + +func (b *Builder) Group() *GroupBuilder { + return NewGroup().base(b.base) +} + +func (b *Builder) Item() *ItemBuilder { + return NewItem().base(b.base) +} + +func (b *Builder) ID(id ID) 
*Builder { + b.base.id = id + return b +} + +func (b *Builder) NewID() *Builder { + b.base.id = NewID() + return b +} + +func (b *Builder) Scene(s SceneID) *Builder { + b.base.scene = s + return b +} + +func (b *Builder) Name(name string) *Builder { + b.base.name = name + return b +} + +func (b *Builder) IsVisible(visible bool) *Builder { + b.base.visible = visible + return b +} + +func (b *Builder) IsVisibleRef(visible *bool) *Builder { + if visible != nil { + b.base.visible = *visible + } + return b +} + +func (b *Builder) Plugin(plugin *PluginID) *Builder { + b.base.plugin = plugin.CopyRef() + return b +} + +func (b *Builder) Extension(extension *PluginExtensionID) *Builder { + b.base.extension = extension.CloneRef() + return b +} + +func (b *Builder) Property(p *PropertyID) *Builder { + b.base.property = p.CopyRef() + return b +} + +func (b *Builder) Infobox(infobox *Infobox) *Builder { + b.base.infobox = infobox + return b +} + +func (b *Builder) Tags(tags *TagList) *Builder { + b.base.tags = tags + return b +} diff --git a/server/pkg/layer/decoding/common.go b/server/pkg/layer/decoding/common.go new file mode 100644 index 000000000..fbf8481d9 --- /dev/null +++ b/server/pkg/layer/decoding/common.go @@ -0,0 +1,532 @@ +package decoding + +import ( + "errors" + "fmt" + "math" + + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/czml" + "github.com/reearth/reearth-backend/pkg/kml" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +var ( + ErrBadColor = errors.New("bad color") + ErrFieldType = errors.New("incompatible field Type") +) + +var ( + extensions = map[string]layer.PluginExtensionID{ + "Point": layer.PluginExtensionID("marker"), + "Polygon": layer.PluginExtensionID("polygon"), + "Polyline": layer.PluginExtensionID("polyline"), + } + propertySchemas = map[string]property.SchemaID{ + "Point": property.MustSchemaID("reearth/marker"), + "Polygon": 
property.MustSchemaID("reearth/polygon"), + "Polyline": property.MustSchemaID("reearth/polyline"), + } + propertyItems = property.SchemaGroupID("default") + propertyFields = map[string]property.FieldID{ + "Point": property.FieldID("location"), + "Polygon": property.FieldID("polygon"), + "Polyline": property.FieldID("coordinates"), + } +) + +func rgbaToHex(rgba []int64) (string, error) { + hex := "" + if len(rgba) != 4 { + return "", ErrBadColor + } + for _, i := range rgba { + if i > 255 || i < 0 { + return "", ErrBadColor + } + h := fmt.Sprintf("%x", i) + if len(h) == 1 { + h += "0" + } + hex += h + } + return hex, nil +} + +func rgbafToHex(rgbaf []float64) (string, error) { + var rgba []int64 + if len(rgbaf) != 4 { + return "", ErrBadColor + } + for _, f := range rgbaf { + var i int64 + if f > 1.0 { + return "", ErrBadColor + } else if f == 1.0 { + i = 255 + } else { + i = int64(math.Floor(f * 256)) + } + + rgba = append(rgba, i) + } + return rgbaToHex(rgba) +} + +func MustCreateProperty(t string, v interface{}, sceneID layer.SceneID, styleItem interface{}, extension string) *property.Property { + p, err := createProperty(t, v, sceneID, styleItem, extension) + if err != nil { + panic(err) + } + return p +} + +func createProperty(t string, v interface{}, sceneID layer.SceneID, styleItem interface{}, extension string) (*property.Property, error) { + propertySchema := propertySchemas[t] + item := propertyItems + field := propertyFields[t] + ps := builtin.GetPropertySchema(propertySchema) + p, err := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchema). 
+ Build() + if err != nil { + return nil, err + } + f, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, field), + ) + + switch t { + case "Point": + if pf, ok := v.(property.LatLngHeight); ok { + v2 := property.ValueTypeLatLng.ValueFrom(&property.LatLng{Lat: pf.Lat, Lng: pf.Lng}) + if v2 == nil { + return nil, ErrFieldType + } + f.UpdateUnsafe(v2) + + v3 := property.ValueTypeNumber.ValueFrom(pf.Height) + if v3 == nil { + return nil, ErrFieldType + } + f2, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "height"), + ) + + f2.UpdateUnsafe(v3) + } else if pf, ok := v.(property.LatLng); ok { + v2 := property.ValueTypeLatLng.ValueFrom(&property.LatLng{Lat: pf.Lat, Lng: pf.Lng}) + if v2 == nil { + return nil, ErrFieldType + } + f.UpdateUnsafe(v2) + } + if styleItem != nil { + switch extension { + case "kml": + s, ok := styleItem.(kml.Style) + if !ok && styleItem != nil { + return nil, ErrFieldType + } + if s.IconStyle.Icon != nil && len(s.IconStyle.Icon.Href) > 0 { + imageValue := property.ValueTypeURL.ValueFrom(s.IconStyle.Icon.Href) + if imageValue == nil { + return nil, ErrFieldType + } + imageField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "image"), + ) + imageField.UpdateUnsafe(imageValue) + } + if s.IconStyle.Scale != 0 { + scaleValue := property.ValueTypeNumber.ValueFrom(s.IconStyle.Scale) + if scaleValue == nil { + return nil, ErrFieldType + } + scaleField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "imageSize"), + ) + scaleField.UpdateUnsafe(scaleValue) + } + if len(s.IconStyle.Color) > 0 { + colorValue := property.ValueTypeString.ValueFrom(s.IconStyle.Color) + if colorValue == nil { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "pointColor"), + ) + colorField.UpdateUnsafe(colorValue) + } + case "geojson": + s, ok := styleItem.(string) + if !ok { + return 
nil, ErrFieldType + } + if len(s) > 0 { + colorValue := property.ValueTypeString.ValueFrom(s) + if colorValue == nil { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "pointColor"), + ) + colorField.UpdateUnsafe(colorValue) + } + case "czml": + s, ok := styleItem.(*czml.Point) + if !ok { + return nil, ErrFieldType + } + if len(s.Color) > 0 { + colorValue := property.ValueTypeString.ValueFrom(s.Color) + if colorValue == nil { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "pointColor"), + ) + colorField.UpdateUnsafe(colorValue) + } + if s.PixelSize != 0 { + sizeValue := property.ValueTypeNumber.ValueFrom(s.PixelSize) + if sizeValue == nil { + return nil, ErrFieldType + } + sizeField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "pointSize"), + ) + sizeField.UpdateUnsafe(sizeValue) + } + } + } + case "Polygon": + v2 := property.ValueTypePolygon.ValueFrom(v) + if v2 == nil { + return nil, ErrFieldType + } + f.UpdateUnsafe(v2) + if styleItem != nil { + switch extension { + case "kml": + s, ok := styleItem.(kml.Style) + if !ok && styleItem != nil { + return nil, ErrFieldType + } + if s.PolyStyle.Stroke { + stroke := property.ValueTypeBool.ValueFrom(s.PolyStyle.Stroke) + if stroke == nil { + return nil, ErrFieldType + } + strokeField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "stroke"), + ) + strokeField.UpdateUnsafe(stroke) + } + if s.LineStyle.Width != 0 { + width := property.ValueTypeNumber.ValueFrom(s.LineStyle.Width) + if width == nil { + return nil, ErrFieldType + } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) + } + if len(s.LineStyle.Color) > 0 { + color := property.ValueTypeString.ValueFrom(s.LineStyle.Color) + if color == nil { + return nil, 
ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) + } + if s.PolyStyle.Fill { + fill := property.ValueTypeBool.ValueFrom(s.PolyStyle.Fill) + if fill == nil { + return nil, ErrFieldType + } + fillField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "fill"), + ) + fillField.UpdateUnsafe(fill) + } + if len(s.PolyStyle.Color) > 0 { + color := property.ValueTypeString.ValueFrom(s.PolyStyle.Color) + if color == nil { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "fillColor"), + ) + colorField.UpdateUnsafe(color) + } + + case "czml": + s, ok := styleItem.(*czml.Polygon) + if !ok && styleItem != nil { + return nil, ErrFieldType + } + if s.Stroke { + stroke := property.ValueTypeBool.ValueFrom(s.Stroke) + if stroke == nil { + return nil, ErrFieldType + } + strokeField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "stroke"), + ) + strokeField.UpdateUnsafe(stroke) + } + if s.StrokeWidth != 0 { + width := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) + if width == nil { + return nil, ErrFieldType + } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) + } + if s.StrokeColor != nil { + var colorValue string + var err error + if len(s.StrokeColor.RGBA) > 0 { + colorValue, err = rgbaToHex(s.StrokeColor.RGBA) + if err != nil { + return nil, err + } + } + if len(s.StrokeColor.RGBAF) > 0 { + colorValue, err = rgbafToHex(s.StrokeColor.RGBAF) + if err != nil { + return nil, err + } + } + color := property.ValueTypeString.ValueFrom(colorValue) + if color == nil { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) + } + 
if s.Fill { + fill := property.ValueTypeBool.ValueFrom(s.Fill) + if fill == nil { + return nil, ErrFieldType + } + fillField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "fill"), + ) + fillField.UpdateUnsafe(fill) + } + if s.Material.SolidColor.Color != nil { + var colorValue string + var err error + if len(s.Material.SolidColor.Color.RGBA) > 0 { + colorValue, err = rgbaToHex(s.Material.SolidColor.Color.RGBA) + if err != nil { + return nil, err + } + } + if len(s.Material.SolidColor.Color.RGBAF) > 0 { + colorValue, err = rgbafToHex(s.Material.SolidColor.Color.RGBAF) + if err != nil { + return nil, err + } + } + color := property.ValueTypeString.ValueFrom(colorValue) + if color == nil { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "fillColor"), + ) + colorField.UpdateUnsafe(color) + } + case "geojson": + s, ok := styleItem.(GeoStyle) + if !ok && styleItem != nil { + return nil, ErrFieldType + } + + if s.StrokeWidth > 0 { + width := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) + if width == nil { + return nil, ErrFieldType + } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) + } + + if len(s.FillColor) > 0 { + fill := property.ValueTypeString.ValueFrom(s.FillColor) + if fill == nil { + return nil, ErrFieldType + } + fillField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "fillColor"), + ) + fillField.UpdateUnsafe(fill) + } + + if len(s.StrokeColor) > 0 { + color := property.ValueTypeString.ValueFrom(s.StrokeColor) + if color == nil { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) + } + } + } + case "Polyline": + v2 := property.ValueTypeCoordinates.ValueFrom(v) + if v2 == nil { + return nil, 
ErrFieldType + } + f.UpdateUnsafe(v2) + if styleItem != nil { + switch extension { + case "kml": + s, ok := styleItem.(kml.Style) + if !ok && styleItem != nil { + return nil, ErrFieldType + } + + if len(s.LineStyle.Color) > 0 { + color := property.ValueTypeString.ValueFrom(s.LineStyle.Color) + if color == nil { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) + } + + if s.LineStyle.Width != 0 { + width := property.ValueTypeNumber.ValueFrom(s.LineStyle.Width) + if width == nil { + return nil, ErrFieldType + } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) + } + case "czml": + s, ok := styleItem.(*czml.Polyline) + if !ok && styleItem != nil { + return nil, ErrFieldType + } + + if s.Width != 0 { + width := property.ValueTypeNumber.ValueFrom(s.Width) + if width == nil { + return nil, ErrFieldType + } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) + } + + if s.Material.PolylineOutline.Color != nil { + var colorValue string + var err error + + if len(s.Material.PolylineOutline.Color.RGBA) > 0 { + colorValue, err = rgbaToHex(s.Material.PolylineOutline.Color.RGBA) + if err != nil { + return nil, err + } + } + + if len(s.Material.PolylineOutline.Color.RGBAF) > 0 { + colorValue, err = rgbafToHex(s.Material.PolylineOutline.Color.RGBAF) + if err != nil { + return nil, err + } + } + + color := property.ValueTypeString.ValueFrom(colorValue) + if color == nil { + return nil, ErrFieldType + } + + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) + } + case "geojson": + s, ok := styleItem.(GeoStyle) + if !ok && styleItem != nil { + return nil, ErrFieldType + } + + if 
s.StrokeWidth > 0 { + width := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) + if width == nil { + return nil, ErrFieldType + } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) + } + + if len(s.StrokeColor) > 0 { + color := property.ValueTypeString.ValueFrom(s.StrokeColor) + if color == nil { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) + } + } + } + } + + return p, nil +} diff --git a/server/pkg/layer/decoding/common_test.go b/server/pkg/layer/decoding/common_test.go new file mode 100644 index 000000000..9ea357238 --- /dev/null +++ b/server/pkg/layer/decoding/common_test.go @@ -0,0 +1,84 @@ +package decoding + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRgbafToHex(t *testing.T) { + tests := []struct { + name string + rgba []float64 + expected string + err error + }{ + { + name: "orange", + rgba: []float64{1, 0.6471, 0, 1}, + expected: "ffa500ff", + err: nil}, + { + name: "RGBA length error", + rgba: []float64{1, 0.6471, 0, 1, 1}, + expected: "", + err: ErrBadColor}, + { + name: "RGBA greater than 1 error", + rgba: []float64{1, 1.6471, 0, 1, 1}, + expected: "", + err: ErrBadColor}, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res, err := rgbafToHex(tt.rgba) + if tt.err == nil { + assert.NoError(t, err) + assert.Equal(t, tt.expected, res) + } else { + assert.Equal(t, tt.err, err) + } + }) + } +} + +func TestRgbaToHex(t *testing.T) { + tests := []struct { + name string + rgba []int64 + expected string + err error + }{ + { + name: "orange", + rgba: []int64{255, 165, 0, 255}, + expected: "ffa500ff", + err: nil}, + { + name: "RGBA length error", + rgba: []int64{255, 165, 0}, + expected: "", + err: ErrBadColor}, + { + name: "RGBA bad boundaries ", + rgba: 
[]int64{400, 165, 0, 1}, + expected: "", + err: ErrBadColor}, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res, err := rgbaToHex(tt.rgba) + if tt.err == nil { + assert.Equal(t, tt.expected, res) + } else { + assert.Equal(t, tt.err, err) + } + }) + } +} diff --git a/server/pkg/layer/decoding/czml.go b/server/pkg/layer/decoding/czml.go new file mode 100644 index 000000000..ed6a43c32 --- /dev/null +++ b/server/pkg/layer/decoding/czml.go @@ -0,0 +1,173 @@ +package decoding + +import ( + "encoding/json" + "errors" + + "github.com/reearth/reearth-backend/pkg/czml" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +type CZMLDecoder struct { + decoder *json.Decoder + sceneId layer.SceneID + groupName string +} + +func NewCZMLDecoder(d *json.Decoder, s layer.SceneID) *CZMLDecoder { + return &CZMLDecoder{ + decoder: d, + sceneId: s, + groupName: "", + } +} + +func (d *CZMLDecoder) Decode() (Result, error) { + var features []czml.Feature + var layers layer.Map + lg, err := layer.NewGroup().NewID().Scene(d.sceneId).Build() + if err != nil { + return Result{}, err + } + var properties property.Map + err = d.decoder.Decode(&features) + if err != nil { + return Result{}, errors.New("unable to parse file content") + } + + for _, v := range features { + var li *layer.Item + var p *property.Property + if v.Id == "document" { + d.groupName = v.Name + } + // case Polygon + //ff,_:=v.Polygon.StrokeColor.(map[string][]int64) + if v.Polygon != nil { + li, p, err = d.decodeLayer("Polygon", v.Polygon.Positions.CartographicDegrees, v.Polygon, v.Name) + } + // case Polyline + if v.Polyline != nil { + li, p, err = d.decodeLayer("Polyline", v.Polyline.Positions.CartographicDegrees, v.Polyline, v.Name) + } + // case Point + if v.Point != nil { + li, p, err = d.decodeLayer("Point", v.Position.CartographicDegrees, v.Point, v.Name) + } + if err != nil { + return Result{}, err + } + if li 
!= nil { + var l layer.Layer = li + lg.Layers().AddLayer(l.ID(), -1) + lg.Rename(d.groupName) + layers = layers.Add(&l) + } + if p != nil { + properties = properties.Add(p) + } + } + + return resultFrom(lg, layers, properties) +} + +func (d *CZMLDecoder) decodeLayer(t string, coords []float64, style interface{}, layerName string) (*layer.Item, *property.Property, error) { + var p *property.Property + var l *layer.Item + var ex layer.PluginExtensionID + var err error + switch t { + case "Point": + var latlng property.LatLng + var height float64 + latlng = property.LatLng{ + Lng: coords[0], + Lat: coords[1], + } + + if len(coords) > 2 { + height = coords[2] + } + + p, err = createProperty("Point", property.LatLngHeight{ + Lat: latlng.Lat, + Lng: latlng.Lng, + Height: height, + }, d.sceneId, style, "czml") + + if err != nil { + return nil, nil, err + } + + ex = extensions["Point"] + if layerName == "" { + layerName = "Point" + } + case "Polyline": + var crds []property.LatLngHeight + if len(coords)%3 != 0 { + return nil, nil, errors.New("unable to parse coordinates") + } + + for { + crds = append(crds, property.LatLngHeight{Lng: coords[0], Lat: coords[1], Height: coords[2]}) + if len(coords) == 3 { + break + } else { + coords = coords[3:] + } + } + + ex = extensions["Polyline"] + p, err = createProperty("Polyline", crds, d.sceneId, style, "czml") + if err != nil { + return nil, nil, err + } + + if layerName == "" { + layerName = "Polyline" + } + case "Polygon": + var poly [][]property.LatLngHeight + if len(coords)%3 != 0 { + return nil, nil, errors.New("unable to parse coordinates") + } + + for { + var crds []property.LatLngHeight + crds = append(crds, property.LatLngHeight{Lng: coords[0], Lat: coords[1], Height: coords[2]}) + poly = append(poly, crds) + if len(coords) == 3 { + break + } else { + coords = coords[3:] + } + } + + ex = extensions["Polygon"] + p, err = createProperty("Polygon", poly, d.sceneId, style, "czml") + if err != nil { + return nil, nil, err + } + 
+ if layerName == "" { + layerName = "Polygon" + } + } + + l, err = layer. + NewItem(). + NewID(). + Name(layerName). + Scene(d.sceneId). + Property(p.IDRef()). + Extension(&ex). + Plugin(&layer.OfficialPluginID). + Build() + if err != nil { + return nil, nil, err + } + + return l, p, nil +} diff --git a/server/pkg/layer/decoding/czml_test.go b/server/pkg/layer/decoding/czml_test.go new file mode 100644 index 000000000..e23b8665d --- /dev/null +++ b/server/pkg/layer/decoding/czml_test.go @@ -0,0 +1,145 @@ +package decoding + +import ( + "encoding/json" + "strings" + "testing" + + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +var _ Decoder = &CZMLDecoder{} + +const ( + czmlmock = `[{ + "id" : "document", + "name" : "CZML Geometries", + "version" : "1.0" +}, +{ + "id" : "point 1", + "name": "point", + "position" : { + "cartographicDegrees" : [-111.0, 40.0, 0] + }, + "point": { + "color": "red", + "outlineColor": { + "rgba": [255, 0, 0, 255] + }, + "outlineWidth" : 4, + "pixelSize": 20 + } +}, +{ + "id" : "purpleLine", + "name" : "Purple arrow at height", + "polyline" : { + "positions" : { + "cartographicDegrees" : [ + -75, 43, 500000, + -125, 43, 500000 + ] + }, + "material" : { + "polylineOutline" : { + "color" : { + "rgba" : [148, 0, 211, 255] + } + } + }, + "arcType" : "NONE", + "width" : 10 + } +},{ + "id" : "testPoly", + "name" : "Red polygon on surface", + "polygon" : { + "positions" : { + "cartographicDegrees" : [ + -115.0, 37.0, 0, + -115.0, 32.0, 0, + -107.0, 33.0, 0, + -102.0, 31.0, 0, + -102.0, 35.0, 0 + ] + }, + "fill":true, + "outline":true, + "outlineWidth":4, + "material" : { + "solidColor" : { + "color" : { + "rgba" : [4, 190, 32, 144] + } + } + }, + "outlineColor":{ + "rgbaf" : [0.434,0.6,0.8766,0] + } + } +}]` +) + +func TestCZMLDecoder_Decode(t *testing.T) { + r := strings.NewReader(czmlmock) + d := json.NewDecoder(r) + s := layer.NewSceneID() + p := 
NewCZMLDecoder(d, s) + result, err := p.Decode() + assert.NoError(t, err) + assert.Equal(t, 4, len(result.Layers)) + assert.Equal(t, 3, len(result.Properties)) + + // Root layer + rootLayer := result.RootLayers().ToLayerGroupList()[0] + assert.NotNil(t, rootLayer) + assert.Equal(t, 3, rootLayer.Layers().LayerCount()) + + // marker + prop := result.Properties[*result.Layers.Layer(rootLayer.Layers().LayerAt(0)).Property()] + field := propertyFields["Point"] + f, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, field)) + fColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "pointColor")) + fSize, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "pointSize")) + assert.Equal(t, "red", fColor.Value().Value()) + assert.Equal(t, 20.0, fSize.Value().Value()) + assert.Equal(t, f.Value().Value(), property.LatLng{Lng: -111.0, Lat: 40.0}) + + // Polyline + prop = result.Properties[*result.Layers.Layer(rootLayer.Layers().LayerAt(1)).Property()] + field2 := propertyFields["Polyline"] + f2, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, field2)) + plist := property.Coordinates{{Lng: -75, Lat: 43, Height: 500000}, {Lng: -125, Lat: 43, Height: 500000}} + assert.Equal(t, f2.Value().Value(), plist) + strokeColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "strokeColor")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, "9400d3ff", strokeColor.Value().Value()) + strokeWidth, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "strokeWidth")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, 10.0, strokeWidth.Value().Value()) + + // Polygon + prop = result.Properties[*result.Layers.Layer(rootLayer.Layers().LayerAt(2)).Property()] + field3 := propertyFields["Polygon"] + f3, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, field3)) + plist2 := property.Polygon{property.Coordinates{property.LatLngHeight{Lng: -115, Lat: 37, Height: 
0}}, property.Coordinates{property.LatLngHeight{Lng: -115, Lat: 32, Height: 0}}, property.Coordinates{property.LatLngHeight{Lng: -107, Lat: 33, Height: 0}}, property.Coordinates{property.LatLngHeight{Lng: -102, Lat: 31, Height: 0}}, property.Coordinates{property.LatLngHeight{Lng: -102, Lat: 35, Height: 0}}} + assert.Equal(t, f3.Value().Value(), plist2) + fill, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "fill")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, true, fill.Value().Value()) + stroke, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "stroke")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, true, stroke.Value().Value()) + fillColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "fillColor")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, "40be2090", fillColor.Value().Value()) + strokeColor2, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "strokeColor")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, "6f99e000", strokeColor2.Value().Value()) + strokeWidth2, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "strokeWidth")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, 4.0, strokeWidth2.Value().Value()) +} diff --git a/server/pkg/layer/decoding/decoder.go b/server/pkg/layer/decoding/decoder.go new file mode 100644 index 000000000..3467be356 --- /dev/null +++ b/server/pkg/layer/decoding/decoder.go @@ -0,0 +1,59 @@ +package decoding + +import ( + "fmt" + + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Decoder interface { + Decode() (Result, error) +} + +type Result struct { + Root *layer.IDList + Layers layer.Map + Properties property.Map +} + +func (r Result) RootLayers() layer.List { + return r.Layers.Pick(r.Root) +} + +func (r Result) Merge(r2 Result) Result { + root := r.Root.Clone() + root.Merge(r2.Root) + return 
Result{ + Root: root, + Layers: r.Layers.Merge(r2.Layers), + Properties: r.Properties.Merge(r2.Properties), + } +} + +func (r Result) MergeInitializerResult(r2 layer.InitializerResult) Result { + return Result{ + Root: r.Root.Clone().AppendLayers(r2.Root), + Layers: r.Layers.Merge(r2.Layers), + Properties: r.Properties.Merge(r2.Properties), + } +} + +func (r Result) Validate() error { + for _, l := range r.Layers.List().Deref() { + if err := l.ValidateProperties(r.Properties); err != nil { + return fmt.Errorf("layer %s is invalid: %w", l.ID(), err) + } + } + return nil +} + +func resultFrom(lg *layer.Group, layers layer.Map, properties property.Map) (r Result, err error) { + r = Result{ + Root: layer.NewIDList([]layer.ID{lg.ID()}), + Layers: layers.Add(lg.LayerRef()), + Properties: properties, + } + err = r.Validate() + return +} diff --git a/server/pkg/layer/decoding/format.go b/server/pkg/layer/decoding/format.go new file mode 100644 index 000000000..abf4d1caa --- /dev/null +++ b/server/pkg/layer/decoding/format.go @@ -0,0 +1,11 @@ +package decoding + +type LayerEncodingFormat string + +const ( + LayerEncodingFormatKML LayerEncodingFormat = "kml" + LayerEncodingFormatCZML LayerEncodingFormat = "czml" + LayerEncodingFormatGEOJSON LayerEncodingFormat = "geojson" + LayerEncodingFormatSHAPE LayerEncodingFormat = "shape" + LayerEncodingFormatREEARTH LayerEncodingFormat = "reearth" +) diff --git a/server/pkg/layer/decoding/geojson.go b/server/pkg/layer/decoding/geojson.go new file mode 100644 index 000000000..ab4516b7e --- /dev/null +++ b/server/pkg/layer/decoding/geojson.go @@ -0,0 +1,250 @@ +package decoding + +import ( + "errors" + "fmt" + "io" + + geojson "github.com/paulmach/go.geojson" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +type GeoStyle struct { + StrokeColor string `json:"stroke"` + StrokeWidth float64 `json:"stroke-width"` + FillColor string `json:"fill"` +} +type GeoJSONDecoder struct { + 
reader io.Reader + features []*geojson.Feature + sceneId layer.SceneID + groupName string +} + +func NewGeoJSONDecoder(r io.Reader, s layer.SceneID) *GeoJSONDecoder { + return &GeoJSONDecoder{ + reader: r, + sceneId: s, + groupName: "", + } +} + +func validateFeatures(fc []*geojson.Feature) []*geojson.Feature { + var res []*geojson.Feature + for _, f := range fc { + if f.Geometry == nil { + continue + } + if f.Geometry.Type == geojson.GeometryMultiPolygon { + for _, p := range f.Geometry.MultiPolygon { + nf := geojson.NewPolygonFeature(p) + for k, v := range f.Properties { + nf.SetProperty(k, v) + } + res = append(res, nf) + } + } else { + res = append(res, f) + } + } + + return res +} + +func (d *GeoJSONDecoder) Decode() (Result, error) { + lg, err := layer.NewGroup().NewID().Scene(d.sceneId).Name("GeoJSON").Build() + if err != nil { + return Result{}, err + } + + con, err := io.ReadAll(d.reader) + if err != nil { + return Result{}, errors.New("unable to parse file content") + } + fc, err := geojson.UnmarshalFeatureCollection(con) + + if err != nil { + return Result{}, errors.New("unable to parse file content") + } + fl := validateFeatures(fc.Features) + // if feature collection > append it to features list, else try to decode a single feature (layer) + if len(fc.Features) > 0 { + d.features = fl + } else { + f, err := geojson.UnmarshalFeature(con) + if err != nil { + return Result{}, errors.New("unable to parse file content") + } + d.features = append(d.features, f) + } + + var layers layer.Map + var properties property.Map + for range d.features { + li, p, err := d.decodeLayer() + if errors.Is(err, io.EOF) { + return resultFrom(lg, layers, properties) + } + if err != nil { + return Result{}, err + } + + if li != nil { + var l layer.Layer = li + lg.Layers().AddLayer(l.ID(), -1) + layers = layers.Add(&l) + } + + if p != nil { + properties = properties.Add(p) + } + } + + return resultFrom(lg, layers, properties) +} + +func (d *GeoJSONDecoder) decodeLayer() 
(*layer.Item, *property.Property, error) {
	// decodeLayer pops the next GeoJSON feature off d.features and converts it
	// into a layer item plus its property. It returns io.EOF once all features
	// have been consumed, which the caller uses as the termination signal.
	var feat *geojson.Feature
	var p *property.Property
	var l *layer.Item
	var ex layer.PluginExtensionID
	var err error
	var stroke, fillColor string
	var strokeWidth float64
	var ok bool
	var layerName string

	if len(d.features) > 0 {
		feat, d.features = d.features[0], d.features[1:]
	} else {
		return nil, nil, io.EOF
	}

	switch feat.Geometry.Type {
	case "Point":
		// A GeoJSON position is [lng, lat] with an optional third height member.
		if len(feat.Geometry.Point) < 2 {
			return nil, nil, errors.New("unable to parse coordinates")
		}
		var latlng property.LatLng
		var height float64
		if len(feat.Geometry.Point) > 2 {
			height = feat.Geometry.Point[2]
		}
		latlng = property.LatLng{
			Lat: feat.Geometry.Point[1],
			Lng: feat.Geometry.Point[0],
		}

		p, err = createProperty("Point", property.LatLngHeight{
			Lat:    latlng.Lat,
			Lng:    latlng.Lng,
			Height: height,
		}, d.sceneId, feat.Properties["marker-color"], "geojson")
		if err != nil {
			return nil, nil, err
		}
		ex = extensions["Point"]

		layerName = "Point"
	case "LineString":
		var coords []property.LatLngHeight
		for _, c := range feat.Geometry.LineString {
			var height float64
			if len(c) == 2 {
				height = 0
			} else if len(c) == 3 {
				// Height is the third member of the [lng, lat, height] triple,
				// i.e. index 2. (The previous code indexed c[3], which is out
				// of range for a 3-element coordinate and panicked.)
				height = c[2]
			} else {
				return nil, nil, errors.New("unable to parse coordinates")
			}
			coords = append(coords, property.LatLngHeight{Lat: c[1], Lng: c[0], Height: height})
		}

		// Optional simplestyle properties; absent keys keep the zero value.
		if feat.Properties["stroke"] != nil {
			stroke, ok = feat.Properties["stroke"].(string)
			if !ok {
				return nil, nil, errors.New("unable to parse")
			}
		}
		if feat.Properties["stroke-width"] != nil {
			strokeWidth, ok = feat.Properties["stroke-width"].(float64)
			if !ok {
				return nil, nil, errors.New("unable to parse")
			}
		}
		ex = extensions["Polyline"]
		p, err = createProperty("Polyline", coords, d.sceneId, GeoStyle{StrokeColor: stroke, StrokeWidth: strokeWidth}, "geojson")
		if err != nil {
			return nil, nil, err
		}

		layerName = "Polyline"
	case "Polygon":
		var poly [][]property.LatLngHeight
		for _, r := range feat.Geometry.Polygon {
			var coords []property.LatLngHeight
			for _, c := range r {
				var height float64
				if len(c) == 2 {
					height = 0
				} else if len(c) == 3 {
					height = c[2] // was c[3]: out of range for a 3-element position
				} else {
					return nil, nil, errors.New("unable to parse coordinates")
				}
				coords = append(coords, property.LatLngHeight{Lat: c[1], Lng: c[0], Height: height})
			}
			poly = append(poly, coords)
		}

		ex = extensions["Polygon"]
		if feat.Properties["stroke"] != nil {
			stroke, ok = feat.Properties["stroke"].(string)
			if !ok {
				return nil, nil, errors.New("unable to parse")
			}
		}

		if feat.Properties["stroke-width"] != nil {
			strokeWidth, ok = feat.Properties["stroke-width"].(float64)
			if !ok {
				return nil, nil, errors.New("unable to parse")
			}
		}

		// Guard on "fill" — the key actually read below. The original checked
		// "stroke-width" here, so a polygon carrying stroke-width but no fill
		// failed with a spurious parse error, and one with fill but no
		// stroke-width silently lost its fill color.
		if feat.Properties["fill"] != nil {
			fillColor, ok = feat.Properties["fill"].(string)
			if !ok {
				return nil, nil, errors.New("unable to parse")
			}
		}

		p, err = createProperty("Polygon", poly, d.sceneId, GeoStyle{StrokeColor: stroke, StrokeWidth: strokeWidth, FillColor: fillColor}, "geojson")
		if err != nil {
			return nil, nil, err
		}

		layerName = "Polygon"
	default:
		return nil, nil, fmt.Errorf("unsupported type %s", feat.Geometry.Type)
	}

	if feat.Properties["name"] != nil {
		// name is not required, so no need to return error if name is not decoded
		layerName, _ = feat.Properties["name"].(string)
	}

	l, err = layer.
		NewItem().
		NewID().
		Name(layerName).
		Scene(d.sceneId).
		Property(p.IDRef()).
		Extension(&ex).
		Plugin(&layer.OfficialPluginID).
+ Build() + if err != nil { + return nil, nil, err + } + return l, p, nil +} diff --git a/server/pkg/layer/decoding/geojson_test.go b/server/pkg/layer/decoding/geojson_test.go new file mode 100644 index 000000000..4f63e6de1 --- /dev/null +++ b/server/pkg/layer/decoding/geojson_test.go @@ -0,0 +1,235 @@ +package decoding + +import ( + "strings" + "testing" + + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +var _ Decoder = &GeoJSONDecoder{} + +const geojsonmock = `{ + "type": "FeatureCollection", + "crs": { + "type": "name", + "properties": { + "name": "EPSG:3857" + } + }, + "features": [ + { + "type": "Feature", + "geometry": { + "type": "Point", + "coordinates": [ + 102.0, + 0.5 + ] + }, + "properties": { + "marker-color": "red" + } + }, + { + "type": "Feature", + "geometry": { + "type": "LineString", + "coordinates": [ + [ + 102.0, + 0.0 + ], + [ + 103.0, + 1.0 + ], + [ + 104.0, + 0.0 + ] + ] + }, + "properties": { + "stroke": "#b55e5e", + "stroke-width": 1.6, + "prop0": "value0", + "prop1": 0.0 + } + }, + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 100.0, + 0.0 + ], + [ + 101.0, + 0.0 + ], + [ + 101.0, + 1.0 + ], + [ + 100.0, + 1.0 + ], + [ + 100.0, + 0.0 + ] + ] + ] + }, + "properties": { + "stroke": "#ffffff", + "stroke-width": 2, + "stroke-opacity": 1, + "fill": "#7c3b3b", + "fill-opacity": 0.5, + "prop0": "value0", + "prop1": { + "this": "that" + } + } + }, +{ + "type": "Feature", + "geometry": null, + "properties": { + "N03_001": "ๆ„›็Ÿฅ็œŒ", + "N03_002": null, + "N03_003": null, + "N03_004": "่ฑŠๆฉ‹ๅธ‚", + "N03_007": "23201" + } + }, + { + "type": "Feature", + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [ + 100.0, + 0.0 + ], + [ + 101.0, + 0.0 + ], + [ + 101.0, + 1.0 + ], + [ + 100.0, + 1.0 + ], + [ + 100.0, + 0.0 + ] + ] + ] + ] + }, + "properties": { + "stroke": "#ffffff", + "stroke-width": 
2, + "stroke-opacity": 1, + "fill": "#7c3b3b", + "fill-opacity": 0.5, + "prop0": "value0", + "prop1": { + "this": "that" + } + } + } + ] +}` + +func TestGeoJSONDecoder_Decode(t *testing.T) { + r := strings.NewReader(geojsonmock) + s := layer.NewSceneID() + p := NewGeoJSONDecoder(r, s) + result, err := p.Decode() + assert.NoError(t, err) + assert.Equal(t, 5, len(result.Layers)) + assert.Equal(t, 4, len(result.Properties)) + + // Root layer + rootLayer := result.RootLayers().ToLayerGroupList()[0] + assert.NotNil(t, rootLayer) + assert.Equal(t, 4, rootLayer.Layers().LayerCount()) + + // marker + prop := result.Properties[*result.Layers.Layer(rootLayer.Layers().LayerAt(0)).Property()] + items := prop.Items() + assert.NotEqual(t, 0, len(items)) + item := propertyItems + field := propertyFields["Point"] + f, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, field)) + fColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "pointColor")) + assert.Equal(t, "red", fColor.Value().Value()) + assert.Equal(t, f.Value().Value(), property.LatLng{Lng: 102.0, Lat: 0.5}) + + // Polyline + prop = result.Properties[*result.Layers.Layer(rootLayer.Layers().LayerAt(1)).Property()] + items2 := prop.Items() + assert.NotEqual(t, 0, len(items2)) + field2 := propertyFields["Polyline"] + f2, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, field2)) + plist := property.Coordinates{{Lng: 102.0, Lat: 0.0, Height: 0}, {Lng: 103.0, Lat: 1.0, Height: 0}, {Lng: 104.0, Lat: 0.0, Height: 0}} + assert.Equal(t, f2.Value().Value(), plist) + strokeColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "strokeColor")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, "#b55e5e", strokeColor.Value().Value()) + strokeWidth, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "strokeWidth")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, 1.6, strokeWidth.Value().Value()) + + // Polygon + prop = 
result.Properties[*result.Layers.Layer(rootLayer.Layers().LayerAt(2)).Property()] + items3 := prop.Items() + assert.NotEqual(t, 0, len(items3)) + field3 := propertyFields["Polygon"] + f3, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, field3)) + plist2 := property.Polygon{property.Coordinates{property.LatLngHeight{Lng: 100, Lat: 0, Height: 0}, property.LatLngHeight{Lng: 101, Lat: 0, Height: 0}, property.LatLngHeight{Lng: 101, Lat: 1, Height: 0}, property.LatLngHeight{Lng: 100, Lat: 1, Height: 0}, property.LatLngHeight{Lng: 100, Lat: 0, Height: 0}}} + assert.Equal(t, f3.Value().Value(), plist2) + fillColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "fillColor")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, "#7c3b3b", fillColor.Value().Value()) + strokeColor2, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "strokeColor")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, "#ffffff", strokeColor2.Value().Value()) + strokeWidth2, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "strokeWidth")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, 2.0, strokeWidth2.Value().Value()) + + // MultiPolygon + prop = result.Properties[*result.Layers.Layer(rootLayer.Layers().LayerAt(2)).Property()] + items4 := prop.Items() + assert.NotEqual(t, 0, len(items4)) + field4 := propertyFields["Polygon"] + f4, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, field4)) + plist3 := property.Polygon{property.Coordinates{property.LatLngHeight{Lng: 100, Lat: 0, Height: 0}, property.LatLngHeight{Lng: 101, Lat: 0, Height: 0}, property.LatLngHeight{Lng: 101, Lat: 1, Height: 0}, property.LatLngHeight{Lng: 100, Lat: 1, Height: 0}, property.LatLngHeight{Lng: 100, Lat: 0, Height: 0}}} + assert.Equal(t, f4.Value().Value(), plist3) + fillColor2, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "fillColor")) + assert.Equal(t, plist3, f3.Value().Value()) + assert.Equal(t, "#7c3b3b", 
fillColor2.Value().Value()) + strokeColor3, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "strokeColor")) + assert.Equal(t, plist3, f3.Value().Value()) + assert.Equal(t, "#ffffff", strokeColor3.Value().Value()) + strokeWidth3, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "strokeWidth")) + assert.Equal(t, plist3, f3.Value().Value()) + assert.Equal(t, 2.0, strokeWidth3.Value().Value()) +} diff --git a/server/pkg/layer/decoding/kml.go b/server/pkg/layer/decoding/kml.go new file mode 100644 index 000000000..b09a19604 --- /dev/null +++ b/server/pkg/layer/decoding/kml.go @@ -0,0 +1,299 @@ +package decoding + +import ( + "encoding/xml" + "errors" + "io" + "regexp" + "strconv" + "strings" + + "github.com/reearth/reearth-backend/pkg/kml" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +type KMLDecoder struct { + decoder *xml.Decoder + sceneId layer.SceneID + styles map[string]kml.Style +} + +func NewKMLDecoder(d *xml.Decoder, s layer.SceneID) *KMLDecoder { + return &KMLDecoder{ + decoder: d, + sceneId: s, + styles: make(map[string]kml.Style), + } +} + +func coordinatesToLatLngHeight(c string) (*property.LatLng, float64, error) { + parts := strings.Split(strings.TrimSpace(c), ",") + lng, err := strconv.ParseFloat(parts[0], 64) + if err != nil { + return nil, 0, err + } + lat, err := strconv.ParseFloat(parts[1], 64) + if err != nil { + return nil, 0, err + } + LatLng := property.LatLng{Lat: lat, Lng: lng} + var height float64 + if len(parts) > 2 { + height, err = strconv.ParseFloat(parts[2], 64) + if err != nil { + return nil, 0, err + } + } + return &LatLng, height, nil +} + +func coordinatesToLatLngHeightList(c string) ([]property.LatLngHeight, error) { + var LatLngHeighList []property.LatLngHeight + coords := strings.Fields(c) + for _, llh := range coords { + reg, err := regexp.Compile(`\s+`) + if err != nil { + return nil, err + } + processed := reg.ReplaceAllString(llh, "") + parts := 
strings.Split(processed, ",") + lng, err := strconv.ParseFloat(parts[0], 64) + if err != nil { + return nil, err + } + lat, err := strconv.ParseFloat(parts[1], 64) + if err != nil { + return nil, err + } + heigh, err := strconv.ParseFloat(parts[2], 64) + if err != nil { + return nil, err + } + LatLngHeigh := property.LatLngHeight{Lat: lat, Lng: lng, Height: heigh} + LatLngHeighList = append(LatLngHeighList, LatLngHeigh) + } + + return LatLngHeighList, nil +} + +func getPolygon(p *kml.Polygon) ([][]property.LatLngHeight, error) { + var pol [][]property.LatLngHeight + outer, err := coordinatesToLatLngHeightList(p.OuterBoundaryIs.LinearRing.Coordinates) + if err != nil { + return nil, err + } + pol = append(pol, outer) + if len(p.InnerBoundaryIs) > 0 { + for _, ib := range p.InnerBoundaryIs { + coords, err := coordinatesToLatLngHeightList(ib.LinearRing.Coordinates) + if err != nil { + return nil, err + } + pol = append(pol, coords) + } + } + return pol, nil +} + +func (d *KMLDecoder) parseKML() (interface{}, error) { + for { + token, err := d.decoder.Token() + if errors.Is(err, io.EOF) || token == nil { + return nil, io.EOF + } + if err != nil { + return nil, err + } + switch startElement := token.(type) { + case xml.StartElement: + st := startElement.Name.Local + switch st { + case "Document", "Folder": + var c kml.Collection + err := d.decoder.DecodeElement(&c, &startElement) + if err != nil { + return nil, err + } + return c, nil + case "Placemark": + var p kml.Placemark + err := d.decoder.DecodeElement(&p, &startElement) + if err != nil { + return nil, err + } + return p, nil + } + } + } +} + +func (d *KMLDecoder) decodeCollection(c kml.Collection, depth int) (*layer.Group, layer.Map, property.Map, error) { + var ll layer.Map + var pl property.Map + lg, err := layer.NewGroup().NewID().Name(c.Name).Scene(d.sceneId).Build() + if err != nil { + return nil, nil, nil, err + } + + if len(c.Styles) > 0 { + for _, s := range c.Styles { + d.styles[s.Id] = s + } + } + + for 
_, f := range c.Folders { + flg, flil, fpl, err := d.decodeCollection(f, depth+1) + if err != nil { + return nil, nil, nil, err + } + + if depth >= 4 { + lg.Layers().AppendLayers(flg.Layers().Layers()...) + } else { + lg.Layers().AppendLayers(flg.ID()) + ll = ll.Add(flg.LayerRef()) + } + + ll = ll.Merge(flil) + pl = pl.Merge(fpl) + } + + for _, p := range c.Placemarks { + pli, pp, err := d.decodePlacemark(p) + if err != nil { + return nil, nil, nil, err + } + lg.Layers().AppendLayers(pli.ID()) + var l layer.Layer = pli + ll = ll.Add(&l) + pl = pl.Add(pp) + } + + return lg, ll, pl, nil +} + +func (d *KMLDecoder) decodePlacemark(p kml.Placemark) (*layer.Item, *property.Property, error) { + var layerItem *layer.Item + var prop *property.Property + var ex layer.PluginExtensionID + var styleId string + var layerName string + + if len(p.StyleUrl) > 0 { + styleId = p.StyleUrl[1:] + } + + if len(p.Point.Coordinates) > 0 { + latlng, height, err := coordinatesToLatLngHeight(p.Point.Coordinates) + if err != nil { + return nil, nil, err + } + prop, err = createProperty("Point", property.LatLngHeight{ + Lat: latlng.Lat, + Lng: latlng.Lng, + Height: height, + }, d.sceneId, d.styles[styleId], "kml") + if err != nil { + return nil, nil, err + } + ex = extensions["Point"] + layerName = "Point" + } else if len(p.Polygon.OuterBoundaryIs.LinearRing.Coordinates) > 0 { + coordslist, err := getPolygon(&p.Polygon) + if err != nil { + return nil, nil, err + } + ex = extensions["Polygon"] + layerName = "Polygon" + prop, err = createProperty("Polygon", coordslist, d.sceneId, d.styles[styleId], "kml") + if err != nil { + return nil, nil, err + } + } else if len(p.Polyline.Coordinates) > 0 { + coords, err := coordinatesToLatLngHeightList(p.Polyline.Coordinates) + if err != nil { + return nil, nil, err + } + ex = extensions["Polyline"] + layerName = "Polyline" + prop, err = createProperty("Polyline", coords, d.sceneId, d.styles[styleId], "kml") + if err != nil { + return nil, nil, err + } + } 
else { + var err error + prop, err = createProperty("Point", nil, d.sceneId, d.styles[styleId], "kml") + if err != nil { + return nil, nil, err + } + ex = extensions["Point"] + layerName = "Point" + } + + if len(p.Name) > 0 { + layerName = p.Name + } + + layerItem, err := layer. + NewItem(). + NewID(). + Name(layerName). + Scene(d.sceneId). + Property(prop.IDRef()). + Extension(&ex). + Plugin(&layer.OfficialPluginID). + Build() + if err != nil { + return nil, nil, err + } + + return layerItem, prop, nil +} + +func (d *KMLDecoder) Decode() (Result, error) { + var ll layer.Map + var lg *layer.Group + var pl property.Map + + for { + parsed, err := d.parseKML() + if errors.Is(err, io.EOF) { + break + } + if err != nil { + return Result{}, err + } + + switch p := parsed.(type) { + case kml.Collection: + lg, ll, pl, err = d.decodeCollection(p, 0) + if err != nil { + return Result{}, err + } + case kml.Placemark: + if lg == nil { + lg, err = layer.NewGroup().NewID().Scene(d.sceneId).Name("KML").Build() + if err != nil { + return Result{}, err + } + } + + li, pp, err := d.decodePlacemark(p) + if err != nil { + return Result{}, err + } + + if li != nil { + lg.Layers().AddLayer(li.ID(), -1) + ll = ll.Add(li.LayerRef()) + } + + if pp != nil { + pl = pl.Add(pp) + } + } + } + + return resultFrom(lg, ll, pl) +} diff --git a/server/pkg/layer/decoding/kml_test.go b/server/pkg/layer/decoding/kml_test.go new file mode 100644 index 000000000..aec6d267c --- /dev/null +++ b/server/pkg/layer/decoding/kml_test.go @@ -0,0 +1,752 @@ +package decoding + +import ( + "encoding/xml" + "errors" + "io" + "net/url" + "strings" + "testing" + + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/kml" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +var _ Decoder = &KMLDecoder{} + +const kmlmock = ` + + + + + + + + CZML Geometries + + 
-122.0822035425683,37.42228990140251,43 + + #CZMLGeometries_01e65 + + + #examplePolyStyle + + + + -77.05788457660967,38.87253259892824,100 + -77.05465973756702,38.87291016281703,100 + -77.05315536854791,38.87053267794386,100 + + + + + + #exampleLineStyle + + 1 + -112.0814237830345,36.10677870477137,0 + -112.0870267752693,36.0905099328766,0 + + + + + +` + +func TestNewKMLDecoder(t *testing.T) { + d := NewKMLDecoder(xml.NewDecoder(strings.NewReader(``)), layer.NewSceneID()) + assert.NotNil(t, d) +} + +func TestKMLDecoder_Decode(t *testing.T) { + r := strings.NewReader(kmlmock) + d := xml.NewDecoder(r) + s := layer.NewSceneID() + k := NewKMLDecoder(d, s) + + result, err := k.Decode() + assert.NoError(t, err) + assert.Equal(t, 6, len(result.Layers)) + assert.Equal(t, 4, len(result.Properties)) + + // Root layer + rootLayer := result.RootLayers().ToLayerGroupList()[0] + assert.NotNil(t, rootLayer) + assert.Equal(t, 1, rootLayer.Layers().LayerCount()) + + // Folder + folder := result.Layers.Group(rootLayer.Layers().LayerAt(0)) + assert.NotNil(t, folder) + assert.Equal(t, 4, folder.Layers().LayerCount()) + + // Marker Test + l := result.Layers.Layer(folder.Layers().LayerAt(0)) + prop := result.Properties[*l.Property()] + fPoint, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, propertyFields["Point"])) + assert.Equal(t, property.LatLng{Lng: -122.0822035425683, Lat: 37.42228990140251}, fPoint.Value().Value()) + fColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "pointColor")) + assert.Equal(t, "ff00ff7f", fColor.Value().Value()) + fSize, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "imageSize")) + assert.Equal(t, 4.0, fSize.Value().Value()) + fImage, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "image")) + actUrl, _ := url.Parse("http://maps.google.com/mapfiles/kml/pal3/icon19.png") + assert.Equal(t, actUrl, fImage.Value().Value()) + fh, _, _ := 
prop.Field(property.PointFieldBySchemaGroup(propertyItems, property.FieldID("height"))) + assert.Equal(t, 43.0, fh.Value().Value()) + + // Polygon test + l = result.Layers.Layer(folder.Layers().LayerAt(1)) + prop = result.Properties[*l.Property()] + polygon, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, propertyFields["Polygon"])) + assert.Equal(t, property.Polygon{{ + {Lng: -77.05788457660967, Lat: 38.87253259892824, Height: 100}, + {Lng: -77.05465973756702, Lat: 38.87291016281703, Height: 100}, + {Lng: -77.0531553685479, Lat: 38.87053267794386, Height: 100}, + }}, polygon.Value().Value()) + fill, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "fill")) + assert.Equal(t, true, fill.Value().Value()) + stroke, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "stroke")) + assert.Equal(t, true, stroke.Value().Value()) + fillColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "fillColor")) + assert.Equal(t, "FF0000", fillColor.Value().Value()) + strokeColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "strokeColor")) + assert.Equal(t, "4DFF0000", strokeColor.Value().Value()) + strokeWidth, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "strokeWidth")) + assert.Equal(t, 4.0, strokeWidth.Value().Value()) + + // Polyline test + l = result.Layers.Layer(folder.Layers().LayerAt(2)) + prop = result.Properties[*l.Property()] + polyline, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, propertyFields["Polyline"])) + assert.Equal(t, property.Coordinates{ + {Lng: -112.0814237830345, Lat: 36.10677870477137, Height: 0}, + {Lng: -112.0870267752693, Lat: 36.0905099328766, Height: 0}, + }, polyline.Value().Value()) + strokeColor2, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "strokeColor")) + assert.Equal(t, "4DFF0000", strokeColor2.Value().Value()) + strokeWidth2, _, _ := 
prop.Field(property.PointFieldBySchemaGroup(propertyItems, "strokeWidth")) + assert.Equal(t, 4.0, strokeWidth2.Value().Value()) + + // Empty test + l = result.Layers.Layer(folder.Layers().LayerAt(3)) + prop = result.Properties[*l.Property()] + assert.Equal(t, propertySchemas["Point"], prop.Schema()) + point, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, propertyFields["Point"])) + assert.Nil(t, point.Value().Value()) +} + +//func TestKMLCoordinatesToLatLng(t *testing.T) { +// tests := []struct { +// name, cords string +// expectedLatLng *property.LatLng +// expectedHeight float64 +// err error +// }{ +// { +// name: "Valid LatLng", cords: "-122.0822035425683,37.42228990140251,43", +// expectedLatLng: &property.LatLng{ +// Lng: -122.0822035425683, +// Lat: 37.42228990140251, +// }, +// expectedHeight: 43, +// err: nil, +// }, +// { +// name: "Failed to parse Lat", cords: "-122.0822035425683,xxx,43", +// expectedLatLng: nil, +// expectedHeight: 0, +// err: strconv.ErrSyntax, +// }, +// { +// name: "Failed to parse Lng", cords: "xxx,-122.0822035425683,43", +// expectedLatLng: nil, +// expectedHeight: 0, +// err: strconv.ErrSyntax, +// }, +// { +// name: "Failed to parse Height", cords: "-122.0822035425683,43,xxx", +// expectedLatLng: nil, +// expectedHeight: 0, +// err: strconv.ErrSyntax, +// }, +// } +// for _, tt := range tests { +// tt := tt +// t.Run(tt.name, func(t *testing.T) { +// t.Parallel() +// ll, h, err := coordinatesToLatLngHeight(tt.cords) +// if tt.err == nil { +// assert.True(t, reflect.DeepEqual(ll, tt.expectedLatLng)) +// assert.Equal(t, tt.expectedHeight, h) +// } else { +// assert.Equal(t, tt.err, err) +// } +// }) +// } +//} +// +//func TestKMLCoordinatesToLatLngList(t *testing.T) { +// tests := []struct { +// name, cords string +// expected []property.LatLngHeight +// err error +// }{ +// { +// name: "Valid Cords", cords: ` -112.0814237830345,36.10677870477137,0 +// -112.0870267752693,36.0905099328766,0 `, +// expected: 
[]property.LatLngHeight{ +// { +// Lat: 36.10677870477137, +// Lng: -112.0814237830345, +// Height: 0, +// }, +// { +// Lat: 36.0905099328766, +// Lng: -112.0870267752693, +// Height: 0, +// }, +// }, +// err: nil, +// }, +// { +// name: "Failed to parse Lng", cords: ` xxx,36.10677870477137,0 +// -112.0870267752693,36.0905099328766,0 `, +// expected: nil, +// err: strconv.ErrSyntax, +// }, +// { +// name: "Failed to parse Lat", cords: ` -112.0814237830345,xxx,0 +// -112.0870267752693,36.0905099328766,0 `, +// expected: nil, +// err: strconv.ErrSyntax, +// }, +// { +// name: "Failed to parse Height", cords: ` -112.0814237830345,36.10677870477137,xxx +// -112.0870267752693,36.0905099328766,0 `, +// expected: nil, +// err: strconv.ErrSyntax, +// }, +// } +// for _, tt := range tests { +// tt := tt +// t.Run(tt.name, func(t *testing.T) { +// t.Parallel() +// res, err := coordinatesToLatLngHeightList(tt.cords) +// if tt.err == nil { +// assert.True(t, reflect.DeepEqual(res, tt.expected)) +// } else { +// assert.Equal(t, tt.err, err) +// } +// }) +// } +//} +//func TestKMLGetPolygon(t *testing.T) { +// cl1 := []property.LatLngHeight{ +// { +// Lng: 36, +// Lat: -112, +// Height: 0, +// }, +// { +// Lng: 34, +// Lat: -112, +// Height: 0, +// }, +// { +// Lng: 35, +// Lat: -111, +// Height: 0, +// }, +// } +// cl2 := []property.LatLngHeight{ +// { +// Lng: 35, +// Lat: -111, +// Height: 10, +// }, +// { +// Lng: 32, +// Lat: -109, +// Height: 10, +// }, +// { +// Lng: 34, +// Lat: -119, +// Height: 10, +// }, +// } +// expected := [][]property.LatLngHeight{cl1, cl2} +// tests := []struct { +// name string +// polygon *kml.Polygon +// expected [][]property.LatLngHeight +// err error +// }{ +// { +// name: "Valid Polygon", +// polygon: &kml.Polygon{ +// OuterBoundaryIs: kml.BoundaryIs{ +// LinearRing: kml.LinearRing{ +// Coordinates: ` 36,-112,0 +// 34,-112,0 +// 35,-111,0`, +// }, +// }, +// InnerBoundaryIs: []kml.BoundaryIs{ +// { +// LinearRing: kml.LinearRing{ +// 
Coordinates: ` 35,-111,10 +// 32,-109,10 +// 34,-119,10 `, +// }, +// }, +// }, +// }, +// expected: expected, +// err: nil, +// }, +// { +// name: "Failed to parse Outer", +// polygon: &kml.Polygon{ +// OuterBoundaryIs: kml.BoundaryIs{ +// LinearRing: kml.LinearRing{ +// Coordinates: ` xxx,-112,0 +// 34,-112,0 +// 35,-111,0`, +// }, +// }, +// InnerBoundaryIs: []kml.BoundaryIs{ +// { +// LinearRing: kml.LinearRing{ +// Coordinates: ` 35,-111,10 +// 32,-109,10 +// 34,-119,10 `, +// }, +// }, +// }, +// }, +// expected: nil, +// err: strconv.ErrSyntax, +// }, +// { +// name: "Failed to parse Inner", +// polygon: &kml.Polygon{ +// OuterBoundaryIs: kml.BoundaryIs{ +// LinearRing: kml.LinearRing{ +// Coordinates: ` 36,-112,0 +// 34,-112,0 +// 35,-111,0`, +// }, +// }, +// InnerBoundaryIs: []kml.BoundaryIs{ +// { +// LinearRing: kml.LinearRing{ +// Coordinates: ` xxx,-111,10 +// 32,-109,10 +// 34,-119,10 `, +// }, +// }, +// }, +// }, +// +// expected: nil, +// err: strconv.ErrSyntax, +// }, +// } +// for _, tt := range tests { +// tt := tt +// t.Run(tt.name, func(t *testing.T) { +// t.Parallel() +// res, err := getPolygon(tt.polygon) +// if tt.err == nil { +// assert.True(t, reflect.DeepEqual(res, tt.expected)) +// } else { +// assert.Equal(t, tt.err, err) +// } +// }) +// } +//} +// +func TestKMLparseKML(t *testing.T) { + s := layer.NewSceneID() + + tests := []struct { + name, KMLstr string + expected interface{} + err error + }{ + { + name: "parse document", + KMLstr: ` + + + test_doc + + `, + expected: kml.Collection{ + Folders: nil, + Placemarks: nil, + Styles: nil, + Name: "test_doc", + }, + err: io.EOF, + }, + { + name: "parse folder", + KMLstr: ` + + + test_fol + + `, + expected: kml.Collection{ + Folders: nil, + Placemarks: nil, + Styles: nil, + Name: "test_fol", + }, + err: io.EOF, + }, + { + name: "parse placemark", + KMLstr: ` + + + test_place + + `, + expected: kml.Placemark{ + Point: kml.Point{Coordinates: ""}, + Polygon: kml.Polygon{ + OuterBoundaryIs: 
kml.BoundaryIs{ + LinearRing: kml.LinearRing{Coordinates: ""}, + }, + InnerBoundaryIs: []kml.BoundaryIs(nil)}, + Polyline: kml.LineString{Coordinates: ""}, + Name: "test_place", + StyleUrl: "", + }, + err: io.EOF, + }, + { + name: "err parse token", + KMLstr: ` + + + test_place + + + `, + expected: nil, + err: errors.New("XML syntax error on line 5: element closed by "), + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + d := NewKMLDecoder(xml.NewDecoder(strings.NewReader(tt.KMLstr)), s) + res, err := d.parseKML() + if tt.expected != nil { + assert.Equal(t, tt.expected, res) + } else { + assert.Equal(t, tt.err.Error(), err.Error()) + } + }) + } +} +func TestKMLdecodePlacemark(t *testing.T) { + s := layer.NewSceneID() + point := MustCreateProperty("Point", property.LatLngHeight{ + Lat: 23, + Lng: 40, + Height: 0, + }, s, nil, "kml") + polyline := MustCreateProperty("Polyline", property.Coordinates{ + property.LatLngHeight{ + Lat: 23, + Lng: 40, + Height: 0, + }, + property.LatLngHeight{ + Lat: 66, + Lng: 34, + Height: 10, + }, + }, s, nil, "kml") + polygon := MustCreateProperty("Polygon", []property.Coordinates{{ + property.LatLngHeight{ + Lat: 23, + Lng: 40, + Height: 0, + }, + property.LatLngHeight{ + Lat: 66, + Lng: 34, + Height: 10, + }, + property.LatLngHeight{ + Lat: 50, + Lng: 12, + Height: 3, + }, + }, + }, s, nil, "kml") + pointExt := extensions["Point"] + polylineExt := extensions["Polyline"] + polygonExt := extensions["Polygon"] + + tests := []struct { + name, pt string + placemark kml.Placemark + expectedLayer *layer.Item + expectedProperty *property.Property + err error + }{ + { + name: "parse point", + pt: "Point", + placemark: kml.Placemark{ + Point: kml.Point{ + Coordinates: "40,23,0", + }, + Polygon: kml.Polygon{}, + Polyline: kml.LineString{}, + Name: "", + StyleUrl: "", + }, + expectedLayer: layer. + NewItem(). + NewID(). + Name("Point"). + Scene(s). + Property(point.IDRef()). 
+ Extension(&pointExt). + Plugin(&layer.OfficialPluginID). + MustBuild(), + expectedProperty: point, + err: nil, + }, + { + name: "parse polyline", + pt: "Polyline", + placemark: kml.Placemark{ + Point: kml.Point{}, + Polygon: kml.Polygon{}, + Polyline: kml.LineString{ + Coordinates: `40,23,0 + 34,66,10`}, + Name: "", + StyleUrl: "", + }, + expectedLayer: layer. + NewItem(). + NewID(). + Name("Polyline"). + Scene(s). + Property(polyline.IDRef()). + Extension(&polylineExt). + Plugin(&layer.OfficialPluginID). + MustBuild(), + expectedProperty: polyline, + err: nil, + }, + { + name: "parse polygon", + pt: "Polygon", + placemark: kml.Placemark{ + Point: kml.Point{}, + Polygon: kml.Polygon{ + OuterBoundaryIs: kml.BoundaryIs{ + LinearRing: kml.LinearRing{ + Coordinates: `40,23,0 + 34,66,10 + 12,50,3`, + }, + }, + InnerBoundaryIs: nil, + }, + Polyline: kml.LineString{}, + Name: "", + StyleUrl: "", + }, + expectedLayer: layer. + NewItem(). + NewID(). + Name("Polygon"). + Scene(s). + Property(polygon.IDRef()). + Extension(&polygonExt). + Plugin(&layer.OfficialPluginID). + MustBuild(), + expectedProperty: polygon, + err: nil, + }, + { + name: "parse other", + pt: "Point", + placemark: kml.Placemark{ + Point: kml.Point{}, + Polygon: kml.Polygon{}, + Polyline: kml.LineString{}, + Name: "", + StyleUrl: "", + }, + expectedLayer: layer. + NewItem(). + NewID(). + Name("Point"). + Scene(s). + Extension(&pointExt). + Plugin(&layer.OfficialPluginID). 
+ MustBuild(), + expectedProperty: nil, + err: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + d := NewKMLDecoder(xml.NewDecoder(strings.NewReader(``)), s) + l, p, err := d.decodePlacemark(tt.placemark) + if tt.err == nil { + assert.NotNil(t, l) + assert.NotNil(t, p) + assert.Equal(t, l.Name(), tt.expectedLayer.Name()) + ps := builtin.GetPropertySchema(propertySchemas[tt.pt]) + fa, _, _, _ := p.GetOrCreateField(ps, property.PointFieldBySchemaGroup(propertyItems, propertyFields[tt.pt])) + fe, _, _, _ := tt.expectedProperty.GetOrCreateField(ps, property.PointFieldBySchemaGroup(propertyItems, propertyFields[tt.pt])) + assert.Equal(t, fe.Value(), fa.Value()) + } else { + assert.Equal(t, tt.err, err) + } + }) + } +} + +// @todo not finished yet +//func TestKMLdecodeCollection(t *testing.T) { +// // @todo err and style cases +// s := layer.NewSceneID() +// pointExt := extensions["Point"] +// point := MustCreateProperty("Point", property.LatLngHeight{ +// Lat: 39, +// Lng: 20, +// Height: 4, +// }, s, nil, "kml") +// li := layer. +// NewItem(). +// NewID(). +// Name("test_placemark"). +// Scene(s). +// Property(point.IDRef()). +// Extension(&pointExt). +// Plugin(&layer.OfficialPluginID). 
+// MustBuild() +// var ll layer.Layer = li +// tests := []struct { +// name string +// collection *kml.Collection +// expectedLayers []*layer.Layer +// expectedProperties []*property.Property +// expectedGroupLayer *layer.Group +// err error +// }{ +// { +// name: "Folders", +// collection: &kml.Collection{ +// Folders: []kml.Collection{}, +// Placemarks: nil, +// Styles: nil, +// Name: "test_folder", +// }, +// expectedLayers: nil, +// expectedProperties: nil, +// expectedGroupLayer: layer.NewGroup().NewID().Name("test_folder").MustBuild(), +// err: nil, +// }, +// { +// name: "Placemarks", +// collection: &kml.Collection{ +// Folders: nil, +// Placemarks: []kml.Placemark{ +// { +// Point: kml.Point{Coordinates: `20,39,4`}, +// Name: "test_placemark", +// StyleUrl: "", +// }, +// }, +// Styles: nil, +// Name: "test_placemark_group", +// }, +// expectedGroupLayer: layer.NewGroup().NewID().Name("test_placemark_group").MustBuild(), +// expectedLayers: []*layer.Layer{&ll}, +// expectedProperties: []*property.Property{point}, +// err: nil, +// }, +// } +// +// for _, tt := range tests { +// tt := tt +// t.Run(tt.name, func(t *testing.T) { +// d := NewKMLDecoder(xml.NewDecoder(strings.NewReader(``)), s) +// _, lm, pm, _ := d.decodeCollection(*tt.collection, 0) +// //if tt.err == nil { +// // if tt.expectedGroupLayer != nil { +// // assert.NotNil(t, lg) +// // assert.Equal(t, tt.expectedGroupLayer.Name(), lg.Name()) +// // } +// // if tt.expectedLayers != nil { +// // assert.NotNil(t, ll) +// // assert.True(t, len(ll) == 1) +// // el := *tt.expectedLayers[0] +// // al := *ll[0] +// // assert.Equal(t, el.Name(), al.Name()) +// // assert.NotNil(t, al.Property()) +// // } +// // if tt.expectedProperties != nil { +// // assert.NotNil(t, pl) +// // assert.True(t, len(pl) == 1) +// // ep := *tt.expectedProperties[0] +// // ap := pl.Keys()[0] +// // fa, _, _, _ := ap.GetOrCreateField(builtin.GetPropertySchema(propertySchemas["Point"]), 
property.PointFieldBySchemaGroup(propertyItems, propertyFields["Point"])) +// // fe, _, _, _ := ep.GetOrCreateField(builtin.GetPropertySchema(propertySchemas["Point"]), property.PointFieldBySchemaGroup(propertyItems, propertyFields["Point"])) +// // assert.Equal(t, fe.Value(), fa.Value()) +// // } +// //} else { +// // assert.Equal(t, tt.err, err) +// //} +// }) +// } +// +//} diff --git a/server/pkg/layer/decoding/reearth.go b/server/pkg/layer/decoding/reearth.go new file mode 100644 index 000000000..ee484d3ed --- /dev/null +++ b/server/pkg/layer/decoding/reearth.go @@ -0,0 +1,295 @@ +package decoding + +import ( + "encoding/json" + "errors" + + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +type ReearthDecoder struct { + d *json.Decoder + scene layer.SceneID +} + +func NewReearthDecoder(d *json.Decoder, scene layer.SceneID) *ReearthDecoder { + return &ReearthDecoder{d: d, scene: scene} +} + +func (d *ReearthDecoder) Decode() (r Result, err error) { + if d == nil || d.d == nil { + return + } + + var root ReearthRoot + if err = d.d.Decode(&root); err != nil { + return + } + + r, err = root.Result(d.scene) + if err != nil { + return + } + + err = r.Validate() + return +} + +type ReearthRoot struct { + Reearth int `json:"reearth"` + Layers []*ReearthLayer `json:"layers"` +} + +func (r *ReearthRoot) Result(scene layer.SceneID) (result Result, err error) { + if r == nil { + return + } + + if r.Reearth != 1 { + err = errors.New("not supported version") + return + } + + for _, l := range r.Layers { + var result2 layer.InitializerResult + i := l.layer() + if result2, err = i.Layer(scene); err != nil { + return + } + result = result.MergeInitializerResult(result2) + } + + return +} + +type ReearthLayer struct { + Plugin *layer.PluginID `json:"plugin"` + Extension *layer.PluginExtensionID `json:"extension"` + Name string `json:"name"` + Infobox *ReearthInfobox 
`json:"infobox"` + Property *ReearthProperty `json:"property"` + Layers []ReearthLayer `json:"layers"` + IsVisible *bool `json:"isVisible"` + LinkedDatasetSchema *layer.DatasetSchemaID `json:"linkedDatasetSchema"` + LinkedDataset *layer.DatasetID `json:"linkedDataset"` +} + +func (l *ReearthLayer) layer() *layer.Initializer { + if l == nil { + return nil + } + + var layers []*layer.Initializer + if l.Layers != nil { + layers = make([]*layer.Initializer, 0, len(l.Layers)) + for _, l2 := range l.Layers { + if l3 := l2.layer(); l3 != nil { + layers = append(layers, l3) + } + } + } + + var psid *property.SchemaID + if l.Plugin != nil && l.Extension != nil { + psid = layer.NewPropertySchemaID(*l.Plugin, l.Extension.String()).Ref() + } + + var pr *property.Initializer + if l.Property != nil { + pr = l.Property.property(psid) + } + + return &layer.Initializer{ + Plugin: l.Plugin, + Extension: l.Extension, + Name: l.Name, + Infobox: l.Infobox.infobox(), + IsVisible: l.IsVisible, + Property: pr, + LinkedDatasetSchema: l.LinkedDatasetSchema, + LinkedDataset: l.LinkedDataset, + Layers: layers, + } +} + +type ReearthInfobox struct { + Property *ReearthProperty `json:"property"` + Blocks []*ReearthInfoboxField `json:"blocks"` +} + +func (i *ReearthInfobox) infobox() *layer.InitializerInfobox { + if i == nil { + return nil + } + + var blocks []*layer.InitializerInfoboxField + if i.Blocks != nil { + blocks = make([]*layer.InitializerInfoboxField, 0, len(i.Blocks)) + for _, f := range i.Blocks { + if f2 := f.infoboxField(); f2 != nil { + blocks = append(blocks, f2) + } + } + } + + var pr *property.Initializer + if i.Property != nil { + pr = i.Property.property(builtin.PropertySchemaIDInfobox.Ref()) + } + + return &layer.InitializerInfobox{ + Property: pr, + Fields: blocks, + } +} + +type ReearthInfoboxField struct { + Plugin layer.PluginID `json:"plugin"` + Extension layer.PluginExtensionID `json:"extension"` + Property *ReearthProperty `json:"property"` +} + +func (f 
*ReearthInfoboxField) infoboxField() *layer.InitializerInfoboxField { + if f == nil || f.Plugin.IsNil() || f.Extension == "" { + return nil + } + + psid := layer.NewPropertySchemaID(f.Plugin, f.Extension.String()).Ref() + + var pr *property.Initializer + if f.Property != nil { + pr = f.Property.property(psid) + } + + return &layer.InitializerInfoboxField{ + Plugin: f.Plugin, + Extension: f.Extension, + Property: pr, + } +} + +type ReearthProperty map[property.SchemaGroupID]ReearthPropertyItem + +func (p ReearthProperty) property(schema *property.SchemaID) *property.Initializer { + if schema == nil || p == nil { + return nil + } + + var items []*property.InitializerItem + items = make([]*property.InitializerItem, 0, len(p)) + for k, i := range p { + items = append(items, i.propertyItem(k)) + } + + return &property.Initializer{ + Schema: *schema, + Items: items, + } +} + +type ReearthPropertyItem struct { + Groups []ReearthPropertyGroup `json:"groups"` + Fields ReearthPropertyGroup `json:"fields"` +} + +func (p *ReearthPropertyItem) propertyItem(key property.SchemaGroupID) *property.InitializerItem { + if p == nil { + return nil + } + + if p.Groups != nil { + groups := make([]*property.InitializerGroup, 0, len(p.Groups)) + for _, g := range p.Groups { + if g == nil { + continue + } + if g2 := g.propertyGroup(); g2 != nil { + groups = append(groups, g2) + } + } + + return &property.InitializerItem{ + SchemaItem: key, + Groups: groups, + } + } + + var fields []*property.InitializerField + if p.Fields != nil { + fields = make([]*property.InitializerField, 0, len(p.Fields)) + for k, f := range p.Fields { + if f2 := f.propertyField(k); f2 != nil { + fields = append(fields, f2) + } + } + } + + return &property.InitializerItem{ + SchemaItem: key, + Fields: fields, + } +} + +type ReearthPropertyGroup map[property.FieldID]*ReearthPropertyField + +func (p ReearthPropertyGroup) propertyGroup() *property.InitializerGroup { + if p == nil || len(p) == 0 { + return nil + } + + var 
fields []*property.InitializerField + fields = make([]*property.InitializerField, 0, len(p)) + for k, f := range p { + if f2 := f.propertyField(k); f2 != nil { + fields = append(fields, f2) + } + } + + return &property.InitializerGroup{ + Fields: fields, + } +} + +type ReearthPropertyField struct { + Type string `json:"type"` + Links []*ReearthPropertyLink `json:"links"` + Value interface{} `json:"value"` +} + +func (f *ReearthPropertyField) propertyField(key property.FieldID) *property.InitializerField { + if f == nil || f.Type == "" { + return nil + } + + v := property.ValueType(f.Type).ValueFrom(f.Value) + if v == nil { + return nil + } + + var links []*property.InitializerLink + if len(links) > 0 { + links = make([]*property.InitializerLink, 0, len(f.Links)) + for _, l := range f.Links { + links = append(links, &property.InitializerLink{ + Dataset: l.Dataset, + Schema: l.Schema, + Field: l.Field, + }) + } + } + + return &property.InitializerField{ + Field: key, + Type: v.Type(), + Value: v, + Links: links, + } +} + +type ReearthPropertyLink struct { + Dataset *property.DatasetID `json:"dataset"` + Schema property.DatasetSchemaID `json:"schema"` + Field property.DatasetFieldID `json:"field"` +} diff --git a/server/pkg/layer/decoding/reearth_test.go b/server/pkg/layer/decoding/reearth_test.go new file mode 100644 index 000000000..9179c5c3b --- /dev/null +++ b/server/pkg/layer/decoding/reearth_test.go @@ -0,0 +1,210 @@ +package decoding + +import ( + "encoding/json" + "strings" + "testing" + + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +var _ Decoder = &ReearthDecoder{} + +func TestReearthDecoder_Decode(t *testing.T) { + sid := layer.NewSceneID() + dsid := layer.NewDatasetSchemaID() + did := layer.NewDatasetID() + reearthjson := `{ + "reearth": 1, + "layers": [ + { + "plugin": "reearth", + "extension": "marker", + "name": "ABC", + "infobox": { + "blocks": [ + { + 
"plugin": "reearth", + "extension": "textblock" + } + ] + }, + "property": { + "default": { + "fields": { + "latlng": { + "type": "latlng", + "value": { + "lat": 1, + "lng": 2 + } + } + } + } + }, + "layers": [ + { + "name": "abc", + "isVisible": true, + "linkedDataset": "` + did.String() + `", + "plugin": "reearth", + "extension": "marker", + "property": { + "hoge": { + "groups": [ + { + "foobar": { + "type": "string", + "value": "bar" + } + }, + { + "foobar": { + "type": "string", + "value": "foo" + } + } + ] + } + } + } + ], + "isVisible": false, + "linkedDatasetSchema": "` + dsid.String() + `" + } + ] + }` + + p := NewReearthDecoder(json.NewDecoder(strings.NewReader(reearthjson)), sid) + result, err := p.Decode() + + assert.NoError(t, err) + assert.Equal(t, 2, len(result.Layers)) // 2 layers + assert.Equal(t, 4, len(result.Properties)) // 3 properties for 2 layers, 1 infobox, and 1 infobox field + + tr := true + f := false + + // root layer + rootLayer := result.Layers.Group(result.Root.LayerAt(0)) + assert.Equal(t, (&layer.Initializer{ + ID: rootLayer.IDRef(), + Plugin: layer.OfficialPluginID.Ref(), + Extension: layer.PluginExtensionID("marker").Ref(), + PropertyID: rootLayer.Property().Ref(), + Name: "ABC", + Infobox: &layer.InitializerInfobox{ + PropertyID: rootLayer.Infobox().Property().Ref(), + Fields: []*layer.InitializerInfoboxField{ + { + ID: rootLayer.Infobox().FieldAt(0).ID().Ref(), + Plugin: layer.OfficialPluginID, + Extension: layer.PluginExtensionID("textblock"), + PropertyID: rootLayer.Infobox().FieldAt(0).Property().Ref(), + }, + }, + }, + LayerIDs: rootLayer.Layers().Layers(), + IsVisible: &f, + LinkedDatasetSchema: &dsid, + }).MustBeLayer(sid).RootLayer(), rootLayer) + + // second layer + secondLayer := result.Layers.Item(rootLayer.Layers().LayerAt(0)) + assert.Equal(t, (&layer.Initializer{ + ID: secondLayer.IDRef(), + Plugin: layer.OfficialPluginID.Ref(), + Extension: layer.PluginExtensionID("marker").Ref(), + PropertyID: 
secondLayer.Property().Ref(), + Name: "abc", + IsVisible: &tr, + LinkedDataset: &did, + }).MustBeLayer(sid).RootLayer(), secondLayer) + + // property of root layer + prop := result.Properties[*rootLayer.Property()] + assert.Equal( + t, + (&property.Initializer{ + ID: prop.ID().Ref(), + Schema: property.MustSchemaID("reearth/marker"), + Items: []*property.InitializerItem{ + { + ID: prop.Items()[0].ID().Ref(), + SchemaItem: property.SchemaGroupID("default"), + Fields: []*property.InitializerField{ + { + Field: property.FieldID("latlng"), + Type: property.ValueTypeLatLng, + Value: property.ValueTypeLatLng.ValueFrom(property.LatLng{Lat: 1, Lng: 2}), + }, + }, + }, + }, + }).MustBeProperty(sid), + prop, + ) + + // property of infobox of root layer + prop = result.Properties[rootLayer.Infobox().Property()] + assert.Equal( + t, + (&property.Initializer{ + ID: rootLayer.Infobox().PropertyRef(), + Schema: property.MustSchemaID("reearth/infobox"), + }).MustBeProperty(sid), + prop, + ) + + // property of infobox field of root layer + prop = result.Properties[rootLayer.Infobox().FieldAt(0).Property()] + assert.Equal( + t, + (&property.Initializer{ + ID: rootLayer.Infobox().FieldAt(0).PropertyRef(), + Schema: property.MustSchemaID("reearth/textblock"), + }).MustBeProperty(sid), + prop, + ) + + // property of second layer + prop = result.Properties[*secondLayer.Property()] + assert.Equal( + t, + (&property.Initializer{ + ID: prop.ID().Ref(), + Schema: property.MustSchemaID("reearth/marker"), + Items: []*property.InitializerItem{ + { + ID: prop.Items()[0].ID().Ref(), + SchemaItem: property.SchemaGroupID("hoge"), + Groups: []*property.InitializerGroup{ + { + ID: property.ToGroupList(prop.Items()[0]).GroupAt(0).IDRef(), + Fields: []*property.InitializerField{ + { + Field: property.FieldID("foobar"), + Type: property.ValueTypeString, + Value: property.ValueTypeString.ValueFrom("bar"), + }, + }, + }, + { + ID: property.ToGroupList(prop.Items()[0]).GroupAt(1).IDRef(), + Fields: 
[]*property.InitializerField{ + { + Field: property.FieldID("foobar"), + Type: property.ValueTypeString, + Value: property.ValueTypeString.ValueFrom("foo"), + }, + }, + }, + }, + }, + }, + }).MustBeProperty(sid), + prop, + ) +} diff --git a/server/pkg/layer/decoding/shapetest/point.shp b/server/pkg/layer/decoding/shapetest/point.shp new file mode 100644 index 000000000..310419cbd Binary files /dev/null and b/server/pkg/layer/decoding/shapetest/point.shp differ diff --git a/server/pkg/layer/decoding/shapetest/polygon.shp b/server/pkg/layer/decoding/shapetest/polygon.shp new file mode 100644 index 000000000..624a9db63 Binary files /dev/null and b/server/pkg/layer/decoding/shapetest/polygon.shp differ diff --git a/server/pkg/layer/decoding/shapetest/polyline.shp b/server/pkg/layer/decoding/shapetest/polyline.shp new file mode 100644 index 000000000..938bc5acc Binary files /dev/null and b/server/pkg/layer/decoding/shapetest/polyline.shp differ diff --git a/server/pkg/layer/decoding/shapetest/shapes.zip b/server/pkg/layer/decoding/shapetest/shapes.zip new file mode 100644 index 000000000..0e20d65f5 Binary files /dev/null and b/server/pkg/layer/decoding/shapetest/shapes.zip differ diff --git a/server/pkg/layer/decoding/shp.go b/server/pkg/layer/decoding/shp.go new file mode 100644 index 000000000..652fd2e5f --- /dev/null +++ b/server/pkg/layer/decoding/shp.go @@ -0,0 +1,100 @@ +package decoding + +import ( + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/shp" +) + +type ShapeReader interface { + Next() bool + Shape() (int, shp.Shape) + Err() error +} +type ShapeDecoder struct { + reader ShapeReader + sceneId layer.SceneID +} + +func NewShapeDecoder(r ShapeReader, s layer.SceneID) *ShapeDecoder { + return &ShapeDecoder{ + reader: r, + sceneId: s, + } +} + +func (shd *ShapeDecoder) getLayer(t string, coords interface{}) (*layer.Item, *property.Property, error) { + var p 
*property.Property + var l *layer.Item + var ex layer.PluginExtensionID + var err error + p, err = createProperty(t, coords, shd.sceneId, nil, "") + if err != nil { + return nil, nil, err + } + ex = extensions[t] + l, err = layer. + NewItem(). + NewID(). + Scene(shd.sceneId). + Property(p.IDRef()). + Extension(&ex). + Plugin(&layer.OfficialPluginID). + Build() + if err != nil { + return nil, nil, err + } + return l, p, nil +} +func (shd *ShapeDecoder) pointsToCoords(pl []shp.Point) []property.LatLngHeight { + var ls []property.LatLngHeight + for _, p := range pl { + ls = append(ls, property.LatLngHeight{ + Lat: p.Y, + Lng: p.X, + }) + } + return ls +} + +func (shd *ShapeDecoder) Decode() (Result, error) { + lg, err := layer.NewGroup().NewID().Scene(shd.sceneId).Name("ShapeFile").Build() + if err != nil { + return Result{}, err + } + var properties property.Map + var layers layer.Map + for shd.reader.Next() { + _, shape := shd.reader.Shape() + var li *layer.Item + var p *property.Property + point, okPoint := shape.(*shp.Point) + polyline, okPolyLine := shape.(*shp.PolyLine) + polygon, okPolygon := shape.(*shp.Polygon) + if okPoint { + li, p, err = shd.getLayer("Point", property.LatLng{ + Lat: point.Y, + Lng: point.X, + }) + } + if okPolyLine { + li, p, err = shd.getLayer("Polyline", shd.pointsToCoords(polyline.Points)) + } + if okPolygon { + li, p, err = shd.getLayer("Polygon", append(make([][]property.LatLngHeight, 1), shd.pointsToCoords(polygon.Points))) + } + if err != nil { + return Result{}, err + } + if li != nil { + var l layer.Layer = li + lg.Layers().AddLayer(l.ID(), -1) + layers = layers.Add(&l) + } + if p != nil { + properties = properties.Add(p) + } + } + + return resultFrom(lg, layers, properties) +} diff --git a/server/pkg/layer/decoding/shp_test.go b/server/pkg/layer/decoding/shp_test.go new file mode 100644 index 000000000..d246c81f5 --- /dev/null +++ b/server/pkg/layer/decoding/shp_test.go @@ -0,0 +1,142 @@ +package decoding + +import ( + "os" + 
"strings" + "testing" + + "github.com/reearth/reearth-backend/pkg/shp" + "github.com/stretchr/testify/assert" +) + +var _ Decoder = &ShapeDecoder{} +var _ ShapeReader = &shp.ZipReader{} +var _ ShapeReader = &shp.Reader{} + +type identityTestFunc func(*testing.T, [][]float64, []shp.Shape) +type shapeGetterFunc func(string, *testing.T) []shp.Shape +type testCaseData struct { + points [][]float64 + tester identityTestFunc +} + +var dataForReadTests = map[string]testCaseData{ + "shapetest/shapes.zip": { + points: [][]float64{ + {10, 10}, + {5, 5}, + {0, 10}, + }, + tester: testPoint, + }, + "shapetest/point.shp": { + points: [][]float64{ + {10, 10}, + {5, 5}, + {0, 10}, + }, + tester: testPoint, + }, + "shapetest/polyline.shp": { + points: [][]float64{ + {0, 0}, + {5, 5}, + {10, 10}, + {15, 15}, + {20, 20}, + {25, 25}, + }, + tester: testPolyLine, + }, + "shapetest/polygon.shp": { + points: [][]float64{ + {0, 0}, + {0, 5}, + {5, 5}, + {5, 0}, + {0, 0}, + }, + tester: testPolygon, + }, +} + +func testPoint(t *testing.T, points [][]float64, shapes []shp.Shape) { + for n, s := range shapes { + p, ok := s.(*shp.Point) + if !ok { + t.Fatal("Failed to type assert.") + } + assert.Equal(t, []float64{p.X, p.Y}, points[n]) + } +} + +func testPolyLine(t *testing.T, points [][]float64, shapes []shp.Shape) { + for n, s := range shapes { + p, ok := s.(*shp.PolyLine) + if !ok { + t.Fatal("Failed to type assert.") + } + for k, point := range p.Points { + assert.Equal(t, points[n*3+k], []float64{point.X, point.Y}) + } + } +} + +func testPolygon(t *testing.T, points [][]float64, shapes []shp.Shape) { + for n, s := range shapes { + p, ok := s.(*shp.Polygon) + if !ok { + t.Fatal("Failed to type assert.") + } + for k, point := range p.Points { + assert.Equal(t, points[n*3+k], []float64{point.X, point.Y}) + } + } +} + +func TestSHPReadZip(t *testing.T) { + testshapeIdentity(t, "shapetest/shapes.zip", getShapesFromFile) +} + +func TestSHPReadPoint(t *testing.T) { + testshapeIdentity(t, 
"shapetest/point.shp", getShapesFromFile) +} + +func TestSHPReadPolyLine(t *testing.T) { + testshapeIdentity(t, "shapetest/polyline.shp", getShapesFromFile) +} + +func TestSHPReadPolygon(t *testing.T) { + testshapeIdentity(t, "shapetest/polygon.shp", getShapesFromFile) +} + +func testshapeIdentity(t *testing.T, prefix string, getter shapeGetterFunc) { + shapes := getter(prefix, t) + d := dataForReadTests[prefix] + d.tester(t, d.points, shapes) +} + +func getShapesFromFile(filename string, t *testing.T) (shapes []shp.Shape) { + var reader ShapeReader + var err error + osr, err := os.Open(filename) + assert.NoError(t, err) + if strings.HasSuffix(filename, ".shp") { + reader, err = shp.ReadFrom(osr) + } else { + reader, err = shp.ReadZipFrom(osr) + } + if err != nil { + t.Fatal("Failed to open shapefile: " + filename + " (" + err.Error() + ")") + } + + for reader.Next() { + _, shape := reader.Shape() + shapes = append(shapes, shape) + } + if reader.Err() != nil { + t.Errorf("error while getting shapes for %s: %v", filename, reader.Err()) + } + + return shapes +} diff --git a/server/pkg/layer/encoding/common.go b/server/pkg/layer/encoding/common.go new file mode 100644 index 000000000..292b6f4c0 --- /dev/null +++ b/server/pkg/layer/encoding/common.go @@ -0,0 +1,44 @@ +package encoding + +import ( + "image/color" + "strconv" + "strings" + + "gopkg.in/go-playground/colors.v1" +) + +func getColor(str string) *color.RGBA { + if len(str) == 0 { + return nil + } + + cs := str + a := "" + + if str[0] == '#' { + if len(str) == 5 { + cs = str[:len(str)-1] + a = strings.Repeat(str[len(str)-1:], 2) + } else if len(str) == 9 { + cs = str[:len(str)-2] + a = str[len(str)-2:] + } + } + + b, err := colors.Parse(cs) + if err != nil || b == nil { + return nil + } + + c := b.ToRGBA() + var alpha uint8 + if a != "" { + a2, _ := strconv.ParseUint(a, 16, 8) + alpha = uint8(a2) + } else { + alpha = uint8(c.A * 255) + } + + return &color.RGBA{R: c.R, G: c.G, B: c.B, A: alpha} +} diff --git 
a/server/pkg/layer/encoding/common_test.go b/server/pkg/layer/encoding/common_test.go new file mode 100644 index 000000000..cf356e261 --- /dev/null +++ b/server/pkg/layer/encoding/common_test.go @@ -0,0 +1,15 @@ +package encoding + +import ( + "image/color" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGetColor(t *testing.T) { + assert.Equal(t, &color.RGBA{R: 255, G: 255, B: 255, A: 255}, getColor("#ffffff")) + assert.Equal(t, &color.RGBA{R: 255, G: 255, B: 255, A: 255}, getColor("#fff")) + assert.Equal(t, &color.RGBA{R: 255, G: 255, B: 255, A: 170}, getColor("#fffa")) + assert.Equal(t, &color.RGBA{R: 255, G: 0, B: 0, A: 170}, getColor("#ff0000aa")) +} diff --git a/server/pkg/layer/encoding/czml.go b/server/pkg/layer/encoding/czml.go new file mode 100644 index 000000000..23b0d1fe0 --- /dev/null +++ b/server/pkg/layer/encoding/czml.go @@ -0,0 +1,194 @@ +package encoding + +import ( + "encoding/json" + "errors" + "io" + + "github.com/reearth/reearth-backend/pkg/czml" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/merging" +) + +type CZMLEncoder struct { + writer io.Writer +} + +func NewCZMLEncoder(w io.Writer) *CZMLEncoder { + return &CZMLEncoder{ + writer: w, + } +} + +func (*CZMLEncoder) MimeType() string { + return "application/json" +} + +func (e *CZMLEncoder) stringToCZMLColor(s string) *czml.Color { + c := getColor(s) + if c == nil { + return nil + } + return &czml.Color{RGBA: []int64{int64(c.R), int64(c.G), int64(c.B), int64(c.A)}} +} + +func (e *CZMLEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*czml.Feature, error) { + if li.PluginID == nil || !layer.OfficialPluginID.Equal(*li.PluginID) { + return nil, nil + } + + feature := czml.Feature{ + Id: li.Original.String(), + Name: li.Name, + } + + switch li.ExtensionID.String() { + case "marker": + var position czml.Position + point := czml.Point{} + if f := li.Property.Field("location").Value().ValueLatLng(); f != nil { + position = 
czml.Position{CartographicDegrees: []float64{(*f).Lng, (*f).Lat}} + } else { + return nil, errors.New("invalid value type") + } + + if f := li.Property.Field("height").Value().ValueNumber(); f != nil { + position.CartographicDegrees = append(position.CartographicDegrees, *f) + } + + if f := li.Property.Field("pointColor").Value().ValueString(); f != nil { + point.Color = *f + } + + if f := li.Property.Field("pointSize").Value().ValueNumber(); f != nil { + point.PixelSize = *f + } + + feature.Position = &position + feature.Point = &point + case "polygon": + polygon := czml.Polygon{} + + if f := li.Property.Field("polygon").Value().ValuePolygon(); f != nil && len(*f) > 0 { + // CZML polygon does not support multi inner rings + for _, l := range (*f)[0] { + polygon.Positions.CartographicDegrees = append( + polygon.Positions.CartographicDegrees, + []float64{l.Lng, l.Lat, l.Height}..., + ) + } + } else { + // polygon is required + return nil, errors.New("invalid value type") + } + + if f := li.Property.Field("fill").Value().ValueBool(); f != nil { + polygon.Fill = *f + } + + if f := li.Property.Field("stroke").Value().ValueBool(); f != nil { + polygon.Stroke = *f + } + + if f := li.Property.Field("fillColor").Value().ValueString(); f != nil { + if c := e.stringToCZMLColor(*f); c != nil { + polygon.Material = &czml.Material{SolidColor: &czml.SolidColor{Color: c}} + } + } + + if f := li.Property.Field("strokeColor").Value().ValueString(); f != nil { + if strokeColor := e.stringToCZMLColor(*f); strokeColor != nil { + polygon.StrokeColor = strokeColor + } + } + + if f := li.Property.Field("strokeWidth").Value().ValueNumber(); f != nil { + polygon.StrokeWidth = *f + } + + feature.Polygon = &polygon + case "polyline": + polyline := czml.Polyline{Positions: czml.Position{}} + + if f := li.Property.Field("coordinates").Value().ValueCoordinates(); f != nil { + for _, l := range *f { + polyline.Positions.CartographicDegrees = append( + polyline.Positions.CartographicDegrees, + 
l.Lng, l.Lat, l.Height, + ) + } + } else { + return nil, errors.New("invalid value type") + } + + if f := li.Property.Field("strokeColor").Value().ValueString(); f != nil { + if strokeColor := e.stringToCZMLColor(*f); strokeColor != nil { + polyline.Material = &czml.Material{ + PolylineOutline: &czml.PolylineOutline{Color: strokeColor}, + } + } + } + + if f := li.Property.Field("strokeWidth").Value().ValueNumber(); f != nil { + polyline.Width = *f + } + + feature.Polyline = &polyline + } + + return &feature, nil +} + +func (e *CZMLEncoder) encodeLayerGroup(li *merging.SealedLayerGroup) ([]*czml.Feature, error) { + groupFeature := czml.Feature{ + Id: "", + Name: "", + } + groupFeature.Id = "document" + groupFeature.Name = li.Name + res := []*czml.Feature{} + res = append(res, &groupFeature) + + for _, ch := range li.Children { + sl := merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: ch.Common().Merged, + Property: ch.Common().Property, + Infobox: ch.Common().Infobox, + }, + } + l, err := e.encodeSingleLayer(&sl) + if err != nil { + return nil, err + } + if l != nil { + res = append(res, l) + } + } + + return res, nil +} + +func (e *CZMLEncoder) Encode(layer merging.SealedLayer) error { + var res []*czml.Feature + var err error + + if i, ok := layer.(*merging.SealedLayerItem); ok { + feature, err := e.encodeSingleLayer(i) + if err != nil { + return err + } + res = append(res, feature) + + } else if g, ok := layer.(*merging.SealedLayerGroup); ok { + res, err = e.encodeLayerGroup(g) + if err != nil { + return err + } + } + + if err := json.NewEncoder(e.writer).Encode(res); err != nil { + return err + } + return nil +} diff --git a/server/pkg/layer/encoding/czml_test.go b/server/pkg/layer/encoding/czml_test.go new file mode 100644 index 000000000..35824157b --- /dev/null +++ b/server/pkg/layer/encoding/czml_test.go @@ -0,0 +1,268 @@ +package encoding + +import ( + "bytes" + "encoding/json" + "testing" + + 
"github.com/reearth/reearth-backend/pkg/czml" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +var _ Encoder = (*CZMLEncoder)(nil) + +func TestCZMLEncoder_Encode(t *testing.T) { + lid := layer.NewID() + sid := layer.NewSceneID() + iid := property.NewItemID() + + tests := []struct { + name string + target merging.SealedLayer + want []*czml.Feature + }{ + { + name: "marker", + target: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Name: "test", + Scene: sid, + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("marker").Ref(), + }, + Property: &property.Sealed{ + Original: property.NewID().Ref(), + Items: []*property.SealedItem{ + { + Original: &iid, + SchemaGroup: property.SchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: property.FieldID("location"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeLatLng, + nil, + property.ValueTypeLatLng.ValueFrom(property.LatLng{Lat: 4.4, Lng: 53.4}), + ), + }, + { + ID: property.FieldID("height"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(34), + ), + }, + { + ID: property.FieldID("pointColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#7fff00ff"), + ), + }, + { + ID: property.FieldID("pointSize"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(2.4), + ), + }, + }, + }, + }, + }, + }, + }, + want: []*czml.Feature{ + { + Id: lid.String(), + Name: "test", + Position: &czml.Position{CartographicDegrees: []float64{53.4, 4.4, 34}}, + Point: &czml.Point{ + Color: "#7fff00ff", + PixelSize: float64(2.4), + }, + }, + }, + }, + { + name: "polygon", + target: 
&merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Name: "test", + Scene: sid, + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("polygon").Ref(), + }, + Property: &property.Sealed{ + Original: property.NewID().Ref(), + Items: []*property.SealedItem{ + { + Original: &iid, + SchemaGroup: property.SchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: property.FieldID("polygon"), + Val: property.NewValueAndDatasetValue( + property.ValueTypePolygon, + nil, + property.ValueTypePolygon.ValueFrom( + property.Polygon{property.Coordinates{ + {Lat: 3.4, Lng: 5.34, Height: 100}, + {Lat: 45.4, Lng: 2.34, Height: 100}, + {Lat: 34.66, Lng: 654.34, Height: 100}, + }}, + ), + ), + }, + { + ID: property.FieldID("fill"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeBool, + nil, + property.ValueTypeBool.ValueFrom(true), + ), + }, + { + ID: property.FieldID("fillColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#ff000000"), + ), + }, + { + ID: property.FieldID("stroke"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeBool, + nil, + property.ValueTypeBool.ValueFrom(true), + ), + }, + { + ID: property.FieldID("strokeColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#ff554555"), + ), + }, + { + ID: property.FieldID("strokeWidth"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(3), + ), + }, + }, + }, + }, + }, + }, + }, + want: []*czml.Feature{ + { + Id: lid.String(), + Name: "test", + Polygon: &czml.Polygon{ + Positions: czml.Position{CartographicDegrees: []float64{5.34, 3.4, 100, 2.34, 45.4, 100, 654.34, 34.66, 100}}, + Fill: true, + Material: &czml.Material{ + SolidColor: &czml.SolidColor{Color: &czml.Color{RGBA: []int64{255, 0, 0, 0}}}, + 
}, + Stroke: true, + StrokeColor: &czml.Color{RGBA: []int64{255, 85, 69, 85}}, + StrokeWidth: 3, + }, + }, + }, + }, + { + name: "polyline", + target: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Name: "test", + Scene: sid, + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("polyline").Ref(), + }, + Property: &property.Sealed{ + Original: property.NewID().Ref(), + Items: []*property.SealedItem{ + { + Original: &iid, + SchemaGroup: property.SchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: property.FieldID("coordinates"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeCoordinates, + nil, + property.ValueTypeCoordinates.ValueFrom(property.Coordinates{ + {Lat: 3.4, Lng: 5.34, Height: 100}, + {Lat: 45.4, Lng: 2.34, Height: 100}, + {Lat: 34.66, Lng: 654.34, Height: 100}, + }), + ), + }, + { + ID: property.FieldID("strokeColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#ff224222"), + ), + }, + { + ID: property.FieldID("strokeWidth"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(3), + ), + }, + }, + }, + }, + }, + }, + }, + want: []*czml.Feature{ + { + Id: lid.String(), + Name: "test", + Polyline: &czml.Polyline{ + Positions: czml.Position{CartographicDegrees: []float64{5.34, 3.4, 100, 2.34, 45.4, 100, 654.34, 34.66, 100}}, + Material: &czml.Material{PolylineOutline: &czml.PolylineOutline{ + Color: &czml.Color{RGBA: []int64{255, 34, 66, 34}}, + }}, + Width: 3, + }, + }, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + expected, _ := json.Marshal(tt.want) + writer := bytes.Buffer{} + assert.NoError(t, NewCZMLEncoder(&writer).Encode(tt.target)) + assert.Equal(t, string(expected)+"\n", writer.String()) + }) + } +} diff --git 
a/server/pkg/layer/encoding/encoder.go b/server/pkg/layer/encoding/encoder.go new file mode 100644 index 000000000..0a0d805cc --- /dev/null +++ b/server/pkg/layer/encoding/encoder.go @@ -0,0 +1,27 @@ +package encoding + +import ( + "io" + + "github.com/reearth/reearth-backend/pkg/layer/merging" +) + +var encoders = map[string]func(w io.Writer) Encoder{ + "kml": func(w io.Writer) Encoder { return NewKMLEncoder(w) }, + "geojson": func(w io.Writer) Encoder { return NewGeoJSONEncoder(w) }, + "czml": func(w io.Writer) Encoder { return NewCZMLEncoder(w) }, + "shp": func(w io.Writer) Encoder { return NewSHPEncoder(w) }, +} + +type Encoder interface { + Encode(merging.SealedLayer) error + MimeType() string +} + +func EncoderFromExt(ext string, w io.Writer) Encoder { + e := encoders[ext] + if e == nil { + return nil + } + return e(w) +} diff --git a/server/pkg/layer/encoding/exporter.go b/server/pkg/layer/encoding/exporter.go new file mode 100644 index 000000000..01773256b --- /dev/null +++ b/server/pkg/layer/encoding/exporter.go @@ -0,0 +1,51 @@ +package encoding + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/merging" +) + +type Exporter struct { + Merger *merging.Merger + Sealer *merging.Sealer + Encoder Encoder +} + +func (e *Exporter) ExportLayerByID(ctx context.Context, l layer.ID) error { + if e == nil { + return nil + } + m, err := e.Merger.MergeLayerFromID(ctx, l, nil) + if err != nil { + return err + } + return e.Encode(ctx, m) +} + +func (e *Exporter) ExportLayer(ctx context.Context, l layer.Layer) error { + if e == nil { + return nil + } + m, err := e.Merger.MergeLayer(ctx, l, nil) + if err != nil { + return err + } + return e.Encode(ctx, m) +} + +func (e *Exporter) Encode(ctx context.Context, m merging.MergedLayer) error { + if e == nil { + return nil + } + s, err := e.Sealer.Seal(ctx, m) + if err != nil { + return err + } + err = e.Encoder.Encode(s) + if err != nil { + return err + } + 
return nil +} diff --git a/server/pkg/layer/encoding/geojson.go b/server/pkg/layer/encoding/geojson.go new file mode 100644 index 000000000..2940813d7 --- /dev/null +++ b/server/pkg/layer/encoding/geojson.go @@ -0,0 +1,165 @@ +package encoding + +import ( + "errors" + "io" + + geojson "github.com/paulmach/go.geojson" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" +) + +type GeoJSONEncoder struct { + writer io.Writer +} + +func NewGeoJSONEncoder(w io.Writer) *GeoJSONEncoder { + return &GeoJSONEncoder{ + writer: w, + } +} + +func (*GeoJSONEncoder) MimeType() string { + return "application/json" +} + +func (e *GeoJSONEncoder) polygonToFloat(p property.Polygon) [][][]float64 { + var res [][][]float64 + for _, c := range p { + t := e.coordsToFloat(c) + res = append(res, t) + } + return res +} + +func (e *GeoJSONEncoder) coordsToFloat(c property.Coordinates) [][]float64 { + var res [][]float64 + for _, l := range c { + t := []float64{} + t = append(t, []float64{l.Lng, l.Lat, l.Height}...) 
+ res = append(res, t) + } + return res +} + +func (e *GeoJSONEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*geojson.Feature, error) { + if li == nil || li.PluginID == nil || !layer.OfficialPluginID.Equal(*li.PluginID) { + return nil, nil + } + + var res *geojson.Feature + + switch li.ExtensionID.String() { + case "marker": + var coords []float64 + + if f := li.Property.Field("location").Value().ValueLatLng(); f != nil { + coords = []float64{(*f).Lng, (*f).Lat} + } else { + return nil, errors.New("invalid value type") + } + + if height := li.Property.Field("height").Value().ValueNumber(); height != nil { + coords = append(coords, *height) + } + + res = geojson.NewFeature(geojson.NewPointGeometry(coords)) + + if f := li.Property.Field("pointColor").Value().ValueString(); f != nil { + res.SetProperty("marker-color", *f) + } + case "polygon": + if f := li.Property.Field("polygon").Value().ValuePolygon(); f != nil { + res = geojson.NewFeature(geojson.NewPolygonGeometry(e.polygonToFloat(*f))) + } else { + return nil, errors.New("invalid value type") + } + + if f := li.Property.Field("fillColor").Value().ValueString(); f != nil { + res.SetProperty("fill", *f) + } + + if f := li.Property.Field("strokeColor").Value().ValueString(); f != nil { + res.SetProperty("stroke", *f) + } + + if f := li.Property.Field("strokeWidth").Value().ValueNumber(); f != nil { + res.SetProperty("stroke-width", *f) + } + case "polyline": + if f := li.Property.Field("coordinates").Value().ValueCoordinates(); f != nil { + res = geojson.NewFeature(geojson.NewLineStringGeometry(e.coordsToFloat(*f))) + } else { + return nil, errors.New("invalid value type") + } + + if f := li.Property.Field("strokeColor").Value().ValueString(); f != nil { + res.SetProperty("stroke", *f) + } + + if f := li.Property.Field("strokeWidth").Value().ValueNumber(); f != nil { + res.SetProperty("stroke-width", *f) + } + } + + if res != nil { + res.SetProperty("name", li.Name) + } + return res, nil +} + +func (e 
*GeoJSONEncoder) encodeLayerGroup(li *merging.SealedLayerGroup) (*geojson.FeatureCollection, error) { + layers := geojson.NewFeatureCollection() + for _, ch := range li.Flatten() { + sl := merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: ch.Common().Merged, + Property: ch.Common().Property, + Infobox: ch.Common().Infobox, + }, + } + l, err := e.encodeSingleLayer(&sl) + if err != nil { + return nil, err + } + if l != nil { + layers.AddFeature(l) + } + } + return layers, nil +} + +func (e *GeoJSONEncoder) Encode(layer merging.SealedLayer) error { + var data []byte + if i, ok := layer.(*merging.SealedLayerItem); ok { + geo, err := e.encodeSingleLayer(i) + if err != nil { + return err + } + if geo != nil { + data, err = geo.MarshalJSON() + if err != nil { + return err + } + } + } else if g, ok := layer.(*merging.SealedLayerGroup); ok { + fc, err := e.encodeLayerGroup(g) + if err != nil { + return err + } + if fc != nil { + data, err = fc.MarshalJSON() + if err != nil { + return err + } + } + } + if len(data) > 0 { + _, err := e.writer.Write(data) + if err != nil { + return err + } + } + return nil +} diff --git a/server/pkg/layer/encoding/geojson_test.go b/server/pkg/layer/encoding/geojson_test.go new file mode 100644 index 000000000..dfabcfd19 --- /dev/null +++ b/server/pkg/layer/encoding/geojson_test.go @@ -0,0 +1,226 @@ +package encoding + +import ( + "bytes" + "testing" + + geojson "github.com/paulmach/go.geojson" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +var _ Encoder = (*GeoJSONEncoder)(nil) + +func TestGeoJSONEncoder_Encode(t *testing.T) { + tests := []struct { + name string + target merging.SealedLayer + want func() *geojson.Feature + }{ + { + name: "point", + target: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + 
Original: layer.NewID(), + Scene: layer.NewSceneID(), + Name: "test", + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("marker").Ref(), + }, + Property: &property.Sealed{ + Original: property.NewID().Ref(), + Items: []*property.SealedItem{ + { + Original: property.NewItemID().Ref(), + SchemaGroup: property.SchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: property.FieldID("location"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeLatLng, + nil, + property.ValueTypeLatLng.ValueFrom(property.LatLng{Lat: 4.4, Lng: 53.4}), + ), + }, + { + ID: property.FieldID("pointColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#7fff00ff"), + ), + }, + { + ID: property.FieldID("height"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(34), + ), + }, + }, + }, + }, + }, + }, + }, + want: func() *geojson.Feature { + f := geojson.NewFeature(geojson.NewPointGeometry([]float64{53.4, 4.4, 34})) + f.SetProperty("marker-color", "#7fff00ff") + f.SetProperty("name", "test") + return f + }, + }, + { + name: "polygon", + target: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: layer.NewID(), + Scene: layer.NewSceneID(), + Name: "test", + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("polygon").Ref(), + }, + Property: &property.Sealed{ + Original: property.NewID().Ref(), + Items: []*property.SealedItem{ + { + Original: property.NewItemID().Ref(), + SchemaGroup: property.SchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: property.FieldID("polygon"), + Val: property.NewValueAndDatasetValue( + property.ValueTypePolygon, + nil, + property.ValueTypePolygon.ValueFrom(property.Polygon{property.Coordinates{ + {Lat: 3.4, Lng: 5.34, Height: 100}, + {Lat: 45.4, Lng: 2.34, Height: 100}, + {Lat: 34.66, 
Lng: 654.34, Height: 100}, + }}), + ), + }, + { + ID: property.FieldID("fillColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#7c3b3b"), + ), + }, + { + ID: property.FieldID("strokeColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#ff3343"), + ), + }, + { + ID: property.FieldID("strokeWidth"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(3), + ), + }, + { + ID: property.FieldID("strokeWidth"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(3), + ), + }, + }, + }, + }, + }, + }, + }, + want: func() *geojson.Feature { + expected := geojson.NewFeature(geojson.NewPolygonGeometry([][][]float64{{{5.34, 3.4, 100}, {2.34, 45.4, 100}, {654.34, 34.66, 100}}})) + expected.SetProperty("name", "test") + expected.SetProperty("fill", "#7c3b3b") + expected.SetProperty("stroke", "#ff3343") + expected.SetProperty("stroke-width", 3) + return expected + }, + }, + { + name: "polyline", + target: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: layer.NewID(), + Scene: layer.NewSceneID(), + Name: "test", + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("polyline").Ref(), + }, + Property: &property.Sealed{ + Original: property.NewID().Ref(), + Items: []*property.SealedItem{ + { + Original: property.NewItemID().Ref(), + SchemaGroup: property.SchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: property.FieldID("coordinates"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeCoordinates, + nil, + property.ValueTypeCoordinates.ValueFrom(property.Coordinates{ + {Lat: 3.4, Lng: 5.34, Height: 100}, + {Lat: 45.4, Lng: 2.34, Height: 100}, + {Lat: 34.66, Lng: 654.34, Height: 100}, + }), + ), + }, + { + ID: 
property.FieldID("strokeColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#ff3343"), + ), + }, + { + ID: property.FieldID("strokeWidth"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(3), + ), + }, + }, + }, + }, + }, + }, + }, + want: func() *geojson.Feature { + expected := geojson.NewFeature(geojson.NewLineStringGeometry([][]float64{{5.34, 3.4, 100}, {2.34, 45.4, 100}, {654.34, 34.66, 100}})) + expected.SetProperty("name", "test") + expected.SetProperty("stroke", "#ff3343") + expected.SetProperty("stroke-width", 3) + return expected + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + expected, _ := tt.want().MarshalJSON() + writer := bytes.Buffer{} + assert.NoError(t, NewGeoJSONEncoder(&writer).Encode(tt.target)) + assert.Equal(t, string(expected), writer.String()) + }) + } +} diff --git a/server/pkg/layer/encoding/kml.go b/server/pkg/layer/encoding/kml.go new file mode 100644 index 000000000..d3f0f25a5 --- /dev/null +++ b/server/pkg/layer/encoding/kml.go @@ -0,0 +1,301 @@ +package encoding + +import ( + "errors" + "io" + + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/merging" + kml "github.com/twpayne/go-kml" +) + +type KMLEncoder struct { + writer io.Writer + styles []*kml.SharedElement +} + +func NewKMLEncoder(w io.Writer) *KMLEncoder { + return &KMLEncoder{ + writer: w, + } +} + +func (*KMLEncoder) MimeType() string { + return "application/xml" +} + +// generates a composite string of layer name and id to be used as style tag id +func generateKMLStyleId(id string) string { + return id + "_style" +} + +func (e *KMLEncoder) getName(str string) *kml.SimpleElement { + return kml.Name(str) +} + +// encodes style features and return style kml element and used id +func (e *KMLEncoder) encodePointStyle(li 
*merging.SealedLayerItem) (*kml.SharedElement, string) { + added := false + styleId := generateKMLStyleId(li.Original.String()) + iconStyle := kml.IconStyle() + + if f := li.Property.Field("image").Value().ValueURL(); f != nil { + iconStyle.Add(kml.Icon(kml.Href(f.String()))) + added = true + } + + if f := li.Property.Field("imageSize").Value().ValueNumber(); f != nil { + iconStyle.Add(kml.Scale(*f)) + added = true + } + + if f := li.Property.Field("pointColor").Value().ValueString(); f != nil { + if c := getColor(*f); c != nil { + iconStyle.Add(kml.Color(c)) + added = true + } + } + + if !added { + return nil, "" + } + + return kml.SharedStyle(styleId, iconStyle), styleId +} + +func (e *KMLEncoder) encodePolygonStyle(li *merging.SealedLayerItem) (*kml.SharedElement, string) { + styleId := generateKMLStyleId(li.Original.String()) + + var polyStyle, lineStyle *kml.CompoundElement + + if f := li.Property.Field("fill").Value().ValueBool(); f != nil { + if polyStyle == nil { + polyStyle = kml.PolyStyle() + } + polyStyle.Add(kml.Fill(*f)) + } + + if f := li.Property.Field("fillColor").Value().ValueString(); f != nil { + if fillColor := getColor(*f); fillColor != nil { + if polyStyle == nil { + polyStyle = kml.PolyStyle() + } + polyStyle.Add(kml.Color(fillColor)) + } + } + + if f := li.Property.Field("stroke").Value().ValueBool(); f != nil { + if lineStyle == nil { + lineStyle = kml.LineStyle() + } + lineStyle.Add(kml.Outline(*f)) + } + + if f := li.Property.Field("strokeColor").Value().ValueString(); f != nil { + if strokeColor := getColor(*f); lineStyle != nil { + if lineStyle == nil { + lineStyle = kml.LineStyle() + } + lineStyle.Add(kml.Color(strokeColor)) + } + } + + if f := li.Property.Field("strokeWidth").Value().ValueNumber(); f != nil { + if lineStyle == nil { + lineStyle = kml.LineStyle() + } + lineStyle.Add(kml.Width(*f)) + } + + style := kml.SharedStyle(styleId) + if polyStyle != nil { + style.Add(polyStyle) + } + if lineStyle != nil { + style.Add(lineStyle) 
+ } + return style, styleId +} + +func (e *KMLEncoder) encodePolylineStyle(li *merging.SealedLayerItem) (*kml.SharedElement, string) { + styleId := generateKMLStyleId(li.Original.String()) + style := kml.SharedStyle(styleId) + var lineStyle *kml.CompoundElement + + if f := li.Property.Field("strokeColor").Value().ValueString(); f != nil { + if strokeColor := getColor(*f); strokeColor != nil { + if lineStyle == nil { + lineStyle = kml.LineStyle() + } + lineStyle.Add(kml.Color(strokeColor)) + } + } + + if f := li.Property.Field("strokeWidth").Value().ValueNumber(); f != nil { + if lineStyle == nil { + lineStyle = kml.LineStyle() + } + lineStyle.Add(kml.Width(*f)) + } + + if lineStyle != nil { + style.Add(lineStyle) + } + return style, styleId +} + +func (e *KMLEncoder) encodeStyle(li *merging.SealedLayerItem) (*kml.SharedElement, string) { + switch li.ExtensionID.String() { + case "marker": + return e.encodePointStyle(li) + case "polygon": + return e.encodePolygonStyle(li) + case "polyline": + return e.encodePolylineStyle(li) + } + return nil, "" +} + +// encodes non style layer features +func (e *KMLEncoder) encodeLayerTag(li *merging.SealedLayerItem) (*kml.CompoundElement, error) { + if li.PluginID == nil || !layer.OfficialPluginID.Equal(*li.PluginID) { + return nil, nil + } + + var layerTag *kml.CompoundElement + + switch li.ExtensionID.String() { + case "marker": + c := kml.Coordinate{} + + if f := li.Property.Field("location").Value().ValueLatLng(); f != nil { + c.Lat = (*f).Lat + c.Lon = (*f).Lng + } else { + return nil, errors.New("invalid value type") + } + + if f := li.Property.Field("height").Value().ValueNumber(); f != nil { + c.Alt = *f + } + + layerTag = kml.Point().Add(kml.Coordinates(c)) + case "polygon": + layerTag = kml.Polygon() + // polygon := property.Polygon{} + if f := li.Property.Field("polygon").Value().ValuePolygon(); f != nil && len(*f) > 0 { + // by default, first polygon coords set is for outer boundaries... 
the second is for inner + for i, r := range *f { + var coords []kml.Coordinate + for _, c := range r { + coords = append(coords, kml.Coordinate{ + Lon: c.Lng, + Lat: c.Lat, + Alt: c.Height, + }) + } + if i == 0 { + layerTag = layerTag.Add(kml.OuterBoundaryIs(kml.LinearRing(kml.Coordinates(coords...)))) + } else { + layerTag = layerTag.Add(kml.InnerBoundaryIs(kml.LinearRing(kml.Coordinates(coords...)))) + } + } + } else { + return nil, errors.New("invalid value type") + } + case "polyline": + if f := li.Property.Field("coordinates").Value().ValueCoordinates(); f != nil && len(*f) > 0 { + coords := make([]kml.Coordinate, 0, len(*f)) + for _, c := range *f { + coords = append(coords, kml.Coordinate{ + Lon: c.Lng, + Lat: c.Lat, + Alt: c.Height, + }) + } + layerTag = kml.LineString().Add(kml.Coordinates(coords...)) + } else { + return nil, errors.New("invalid value type") + } + } + + placemark := kml.Placemark() + if len(li.Name) != 0 { + placemark.Add(e.getName(li.Name)) + } + placemark = placemark.Add(layerTag) + + return placemark, nil +} + +func (e *KMLEncoder) encodeLayerGroup(li *merging.SealedLayerGroup, parent *kml.CompoundElement) (*kml.CompoundElement, error) { + name := e.getName(li.Name) + if len(li.Name) != 0 { + parent.Add(name) + } + + for _, ch := range li.Children { + if g, ok := ch.(*merging.SealedLayerGroup); ok { + folder := kml.Folder() + folder, err := e.encodeLayerGroup(g, folder) + if err != nil { + return nil, err + } + parent = parent.Add(folder) + } else if i, ok := ch.(*merging.SealedLayerItem); ok { + placemark, err := e.encodeLayerTag(i) + if err != nil { + return nil, err + } else if placemark == nil { + return nil, nil + } + if style, styleId := e.encodeStyle(i); style != nil { + e.styles = append(e.styles, style) + placemark.Add(kml.StyleURL("#" + styleId)) + } + parent = parent.Add(placemark) + } + } + + return parent, nil +} + +func (e *KMLEncoder) Encode(layer merging.SealedLayer) (err error) { + var res *kml.CompoundElement + + if i, 
ok := layer.(*merging.SealedLayerItem); ok { + style, styleId := e.encodeStyle(i) + l, err := e.encodeLayerTag(i) + if err != nil { + return err + } + + if style != nil { + res = kml.KML(style) + res = res.Add(l) + l.Add(kml.StyleURL("#" + styleId)) + } else { + res = kml.KML(l) + } + } else if g, ok := layer.(*merging.SealedLayerGroup); ok { + doc := kml.Document() + doc, err := e.encodeLayerGroup(g, doc) + if err != nil { + return err + } + + if len(e.styles) > 0 { + for _, s := range e.styles { + doc.Add(s) + } + } + + res = kml.KML(doc) + } + + if err := res.WriteIndent(e.writer, "", " "); err != nil { + return err + } + + return nil +} diff --git a/server/pkg/layer/encoding/kml_test.go b/server/pkg/layer/encoding/kml_test.go new file mode 100644 index 000000000..a64c7f9cf --- /dev/null +++ b/server/pkg/layer/encoding/kml_test.go @@ -0,0 +1,302 @@ +package encoding + +import ( + "bytes" + "testing" + + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" + "github.com/twpayne/go-kml" +) + +var _ Encoder = (*KMLEncoder)(nil) + +func TestKMLEncoder_Encode(t *testing.T) { + lid := layer.MustID("01fmph48ykj1nd82r8e4znh6a6") + + tests := []struct { + name string + target merging.SealedLayer + want func() *kml.CompoundElement + }{ + { + name: "marker", + target: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Scene: layer.NewSceneID(), + Name: "test", + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("marker").Ref(), + }, + Property: &property.Sealed{ + Original: property.NewID().Ref(), + Items: []*property.SealedItem{ + { + Original: property.NewItemID().Ref(), + SchemaGroup: property.SchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: property.FieldID("location"), + Val: property.NewValueAndDatasetValue( + 
property.ValueTypeLatLng, + nil, + property.ValueTypeLatLng.ValueFrom(property.LatLng{Lat: 4.4, Lng: 53.4}), + ), + }, + { + ID: property.FieldID("height"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(100), + ), + }, + { + ID: property.FieldID("imageSize"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(4), + ), + }, + { + ID: property.FieldID("image"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeURL, + nil, + property.ValueTypeURL.ValueFrom("http://maps.google.com/mapfiles/kml/pal4/icon28.png"), + ), + }, + { + ID: property.FieldID("pointColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#7fff00ff"), + ), + }, + }, + }, + }, + }, + }, + }, + want: func() *kml.CompoundElement { + k := kml.KML( + kml.SharedStyle( + "01fmph48ykj1nd82r8e4znh6a6_style", + kml.IconStyle( + kml.Icon(kml.Href("http://maps.google.com/mapfiles/kml/pal4/icon28.png")), + kml.Scale(4), + kml.Color(getColor("#7fff00ff")), + ), + ), + ) + k = k.Add( + kml.Placemark( + kml.Name("test"), + kml.Point(kml.Coordinates(kml.Coordinate{Lon: 53.4, Lat: 4.4, Alt: 100})), + kml.StyleURL("#01fmph48ykj1nd82r8e4znh6a6_style"), + ), + ) + return k + }, + }, + { + name: "polygon", + target: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Scene: layer.NewSceneID(), + Name: "test", + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("polygon").Ref(), + }, + Property: &property.Sealed{ + Original: property.NewID().Ref(), + Items: []*property.SealedItem{ + { + Original: property.NewItemID().Ref(), + SchemaGroup: property.SchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: property.FieldID("polygon"), + Val: property.NewValueAndDatasetValue( + property.ValueTypePolygon, + nil, + 
property.ValueTypePolygon.ValueFrom(property.Polygon{property.Coordinates{ + property.LatLngHeight{Lat: 3.4, Lng: 5.34, Height: 100}, + property.LatLngHeight{Lat: 45.4, Lng: 2.34, Height: 100}, + property.LatLngHeight{Lat: 34.66, Lng: 654.34, Height: 100}, + }}), + ), + }, + { + ID: property.FieldID("fill"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeBool, + nil, + property.ValueTypeBool.ValueFrom(true), + ), + }, + { + ID: property.FieldID("fillColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#ff334353"), + ), + }, + { + ID: property.FieldID("stroke"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeBool, + nil, + property.ValueTypeBool.ValueFrom(true), + ), + }, + { + ID: property.FieldID("strokeColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#ff554555"), + ), + }, + { + ID: property.FieldID("strokeWidth"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(3), + ), + }, + }, + }, + }, + }, + }, + }, + want: func() *kml.CompoundElement { + k := kml.KML( + kml.SharedStyle( + "01fmph48ykj1nd82r8e4znh6a6_style", + kml.PolyStyle( + kml.Fill(true), + kml.Color(getColor("#ff334353")), + ), + kml.LineStyle( + kml.Outline(true), + kml.Color(getColor("#ff554555")), + kml.Width(3), + ), + ), + ) + k = k.Add( + kml.Placemark(kml.Name("test"), + kml.Polygon(kml.OuterBoundaryIs(kml.LinearRing(kml.Coordinates( + kml.Coordinate{Lon: 5.34, Lat: 3.4, Alt: 100}, + kml.Coordinate{Lon: 2.34, Lat: 45.4, Alt: 100}, + kml.Coordinate{Lon: 654.34, Lat: 34.66, Alt: 100}, + )))), + kml.StyleURL("#01fmph48ykj1nd82r8e4znh6a6_style"), + ), + ) + return k + }, + }, + { + name: "polyline", + target: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Scene: layer.NewSceneID(), + Name: "test", 
+ PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("polyline").Ref(), + }, + Property: &property.Sealed{ + Original: property.NewID().Ref(), + Items: []*property.SealedItem{ + { + Original: property.NewItemID().Ref(), + SchemaGroup: property.SchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: property.FieldID("coordinates"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeCoordinates, + nil, + property.ValueTypeCoordinates.ValueFrom(property.Coordinates{ + property.LatLngHeight{Lat: 3.4, Lng: 5.34, Height: 100}, + property.LatLngHeight{Lat: 45.4, Lng: 2.34, Height: 100}, + property.LatLngHeight{Lat: 34.66, Lng: 654.34, Height: 100}, + }), + ), + }, + { + ID: property.FieldID("strokeColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#ff224222"), + ), + }, + { + ID: property.FieldID("strokeWidth"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(3), + ), + }, + }, + }, + }, + }, + }, + }, + want: func() *kml.CompoundElement { + k := kml.KML( + kml.SharedStyle( + "01fmph48ykj1nd82r8e4znh6a6_style", + kml.LineStyle( + kml.Color(getColor("#ff224222")), + kml.Width(3), + ), + ), + ) + k = k.Add( + kml.Placemark( + kml.Name("test"), + kml.LineString(kml.Coordinates( + kml.Coordinate{Lon: 5.34, Lat: 3.4, Alt: 100}, + kml.Coordinate{Lon: 2.34, Lat: 45.4, Alt: 100}, + kml.Coordinate{Lon: 654.34, Lat: 34.66, Alt: 100}, + )), + kml.StyleURL("#01fmph48ykj1nd82r8e4znh6a6_style"), + ), + ) + return k + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + we := bytes.Buffer{} + _ = tt.want().WriteIndent(&we, "", " ") + wa := bytes.Buffer{} + assert.NoError(t, NewKMLEncoder(&wa).Encode(tt.target)) + assert.Equal(t, we.String(), wa.String()) + }) + } +} diff --git a/server/pkg/layer/encoding/shp.go b/server/pkg/layer/encoding/shp.go new 
file mode 100644 index 000000000..93677778c --- /dev/null +++ b/server/pkg/layer/encoding/shp.go @@ -0,0 +1,218 @@ +package encoding + +import ( + "errors" + "io" + + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" + shp "github.com/reearth/reearth-backend/pkg/shp" + wsc "github.com/reearth/reearth-backend/pkg/writer" +) + +type SHPEncoder struct { + writer io.Writer +} + +func NewSHPEncoder(w io.Writer) *SHPEncoder { + return &SHPEncoder{ + writer: w, + } +} + +func (*SHPEncoder) MimeType() string { + return "application/octet-stream" +} + +func coordsToPoints(coords property.Coordinates) []shp.Point { + var res []shp.Point + for _, l := range coords { + res = append(res, shp.Point{ + X: l.Lng, + Y: l.Lat, + }) + } + return res +} + +func polygonToPoints(poly property.Polygon) ([]shp.Point, []int32) { + var res []shp.Point + parts := []int32{0} + for i, c := range poly { + partPoints := coordsToPoints(c) + res = append(res, partPoints...) 
+ if i > 0 { + parts = append(parts, int32(len(partPoints)-1)) + } + } + return res, parts +} + +func getMaxMinPoints(points []shp.Point) (shp.Point, shp.Point) { + var max, min shp.Point + for i, p := range points { + if i == 0 || p.X > min.X { + max.X = p.X + } + if i == 0 || p.X < min.X { + min.X = p.X + } + if i == 0 || p.Y > max.Y { + max.Y = p.Y + } + if i == 0 || p.Y < min.Y { + min.Y = p.Y + } + } + return max, min +} + +func coordinatesToSHP(coordinates property.Coordinates) *shp.PolyLine { + points := coordsToPoints(coordinates) + max, min := getMaxMinPoints(points) + return &shp.PolyLine{ + Box: shp.Box{ + MinX: min.X, + MinY: min.Y, + MaxX: max.X, + MaxY: max.Y, + }, + NumParts: 1, + NumPoints: int32(len(points)), + Parts: []int32{0}, + Points: points, + } +} + +func polygonToSHP(poly property.Polygon) *shp.Polygon { + points, parts := polygonToPoints(poly) + max, min := getMaxMinPoints(points) + res := shp.Polygon{ + Box: shp.Box{ + MinX: min.X, + MinY: min.Y, + MaxX: max.X, + MaxY: max.Y, + }, + NumParts: int32(len(poly)), + NumPoints: int32(len(points)), + Parts: parts, + Points: points, + } + return &res +} + +func (e *SHPEncoder) encodeLayer(li *merging.SealedLayerItem) (sh shp.Shape, st shp.ShapeType, err error) { + if li.PluginID == nil || !layer.OfficialPluginID.Equal(*li.PluginID) { + return nil, 0, nil + } + switch li.ExtensionID.String() { + case "marker": + sh, st = e.encodeMarker(li) + case "polygon": + sh, st = e.encodePolygon(li) + case "polyline": + sh, st = e.encodePolyline(li) + } + if sh == nil || st == 0 { + return nil, 0, errors.New("invalid value type") + } + return sh, st, nil +} + +func (e *SHPEncoder) encodeLayerGroup(w *wsc.WriterSeeker, li *merging.SealedLayerGroup, shape *shp.Writer) error { + for _, ch := range li.Children { + if g, ok := ch.(*merging.SealedLayerGroup); ok { + if err := e.encodeLayerGroup(w, g, shape); err != nil { + return err + } + } else if i, ok := ch.(*merging.SealedLayerItem); ok { + l, t, err := 
e.encodeLayer(i) + if err != nil { + return err + } + + if shape == nil { + shape, err = shp.CreateFrom(w, t) + + if err != nil { + return err + } + + defer func() { + err = shape.Close() + }() + } + + if _, err := shape.Write(l); err != nil { + return err + } + } + } + + return nil +} + +func (e *SHPEncoder) Encode(layer merging.SealedLayer) error { + var err error + var w wsc.WriterSeeker + if i, ok := layer.(*merging.SealedLayerItem); ok { + l, t, err := e.encodeLayer(i) + if err != nil { + return err + } + shape, err := shp.CreateFrom(&w, t) + if err != nil { + return err + } + defer func() { + err = shape.Close() + + }() + if err != nil { + return err + } + _, err = shape.Write(l) + if err != nil { + return err + } + } else if g, ok := layer.(*merging.SealedLayerGroup); ok { + err := e.encodeLayerGroup(&w, g, nil) + if err != nil { + return err + } + } + _, err = w.WriteTo(e.writer) + if err != nil { + return err + } + return nil +} + +func (*SHPEncoder) encodeMarker(li *merging.SealedLayerItem) (shp.Shape, shp.ShapeType) { + f := li.Property.Field("location").Value().ValueLatLng() + if f == nil { + return nil, 0 + } + return &shp.Point{ + X: (*f).Lng, + Y: (*f).Lat, + }, shp.POINT +} + +func (*SHPEncoder) encodePolygon(li *merging.SealedLayerItem) (shp.Shape, shp.ShapeType) { + f := li.Property.Field("polygon").Value().ValuePolygon() + if f == nil || len(*f) == 0 { + return nil, 0 + } + return polygonToSHP(*f), shp.POLYGON +} + +func (*SHPEncoder) encodePolyline(li *merging.SealedLayerItem) (shp.Shape, shp.ShapeType) { + f := li.Property.Field("coordinates").Value().ValueCoordinates() + if f == nil || len(*f) == 0 { + return nil, 0 + } + return coordinatesToSHP(*f), shp.POLYLINE +} diff --git a/server/pkg/layer/encoding/shp_test.go b/server/pkg/layer/encoding/shp_test.go new file mode 100644 index 000000000..1f0437607 --- /dev/null +++ b/server/pkg/layer/encoding/shp_test.go @@ -0,0 +1,156 @@ +package encoding + +import ( + "os" + "testing" + + 
"github.com/jonas-p/go-shp" + + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +var _ Encoder = (*SHPEncoder)(nil) + +func TestEncodeSHP(t *testing.T) { + tests := []struct { + name string + layer *merging.SealedLayerItem + want shp.Shape + }{ + { + layer: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: layer.NewID(), + Parent: nil, + Scene: layer.NewSceneID(), + Property: nil, + Infobox: nil, + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("polygon").Ref(), + }, + Property: &property.Sealed{ + Original: property.NewID().Ref(), + Items: []*property.SealedItem{ + { + Original: property.NewItemID().Ref(), + SchemaGroup: property.SchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: property.FieldID("polygon"), + Val: property.NewValueAndDatasetValue( + property.ValueTypePolygon, + nil, + property.ValueTypePolygon.ValueFrom(property.Polygon{property.Coordinates{ + {Lat: 3.4, Lng: 5.34, Height: 100}, + {Lat: 45.4, Lng: 2.34, Height: 100}, + {Lat: 34.66, Lng: 654.34, Height: 100}, + }}), + ), + }, + }, + }, + }, + }, + }, + }, + want: &shp.Polygon{ + Box: shp.Box{ + MinX: 2.34, + MaxX: 654.34, + MinY: 3.4, + MaxY: 45.4, + }, + NumParts: 1, + NumPoints: 3, + Parts: []int32{0}, + Points: []shp.Point{ + {X: 5.34, Y: 3.4}, + {X: 2.34, Y: 45.4}, + {X: 654.34, Y: 34.66}, + }, + }, + }, + { + name: "polyline", + layer: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: layer.NewID(), + Parent: nil, + Name: "test", + Scene: layer.NewSceneID(), + Property: nil, + Infobox: nil, + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("polyline").Ref(), + }, + Property: &property.Sealed{ + Original: property.NewID().Ref(), + Items: 
[]*property.SealedItem{ + { + Original: property.NewItemID().Ref(), + SchemaGroup: property.SchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: property.FieldID("coordinates"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeCoordinates, + nil, + property.ValueTypeCoordinates.ValueFrom(property.Coordinates{ + {Lat: 3.4, Lng: 5.34, Height: 100}, + {Lat: 45.4, Lng: 2.34, Height: 100}, + {Lat: 34.66, Lng: 654.34, Height: 100}, + }), + ), + }, + }, + }, + }, + }, + }, + }, + want: &shp.PolyLine{ + Box: shp.Box{ + MinX: 2.34, + MaxX: 654.34, + MinY: 3.4, + MaxY: 45.4, + }, + NumParts: 1, + NumPoints: 3, + Parts: []int32{0}, + Points: []shp.Point{ + {X: 5.34, Y: 3.4}, + {X: 2.34, Y: 45.4}, + {X: 654.34, Y: 34.66}, + }, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + // sequential test + + tmpFile, err := os.CreateTemp(os.TempDir(), "*.shp") + assert.NoError(t, err) + en := NewSHPEncoder(tmpFile) + assert.NoError(t, en.Encode(tt.layer)) + + shape, err := shp.Open(tmpFile.Name()) + assert.True(t, shape.Next()) + + assert.NoError(t, err) + assert.NoError(t, os.Remove(tmpFile.Name())) + assert.NoError(t, shape.Close()) + + _, p := shape.Shape() + assert.Equal(t, tt.want, p) + }) + } +} diff --git a/server/pkg/layer/group.go b/server/pkg/layer/group.go new file mode 100644 index 000000000..8a7ed345f --- /dev/null +++ b/server/pkg/layer/group.go @@ -0,0 +1,190 @@ +package layer + +import ( + "github.com/reearth/reearth-backend/pkg/property" +) + +type Group struct { + layerBase + layers *IDList + linkedDatasetSchema *DatasetSchemaID + root bool +} + +func (l *Group) ID() ID { + return l.layerBase.ID() +} + +func (l *Group) IDRef() *ID { + if l == nil { + return nil + } + return l.layerBase.IDRef() +} + +func (l *Group) Name() string { + return l.layerBase.Name() +} + +func (l *Group) Plugin() *PluginID { + if l == nil { + return nil + } + return l.layerBase.Plugin() +} + +func (l *Group) 
Extension() *PluginExtensionID { + if l == nil { + return nil + } + return l.layerBase.Extension() +} + +func (l *Group) UsesPlugin() bool { + return l.layerBase.UsesPlugin() +} + +func (l *Group) Property() *PropertyID { + if l == nil { + return nil + } + return l.layerBase.Property() +} + +func (l *Group) Infobox() *Infobox { + if l == nil { + return nil + } + return l.layerBase.Infobox() +} + +func (l *Group) IsVisible() bool { + if l == nil { + return false + } + return l.layerBase.IsVisible() +} + +func (l *Group) Rename(name string) { + if l == nil { + return + } + l.layerBase.Rename(name) +} + +func (l *Group) SetInfobox(infobox *Infobox) { + if l == nil { + return + } + l.layerBase.SetInfobox(infobox) +} + +func (l *Group) SetVisible(visible bool) { + if l == nil { + return + } + l.layerBase.SetVisible(visible) +} + +func (l *Group) SetPlugin(plugin *PluginID) { + if l == nil { + return + } + l.layerBase.SetPlugin(plugin) +} + +func (l *Group) IsLinked() bool { + if l == nil { + return false + } + return l.linkedDatasetSchema != nil +} + +func (l *Group) LinkedDatasetSchema() *DatasetSchemaID { + if l == nil { + return nil + } + return l.linkedDatasetSchema.CopyRef() +} + +func (l *Group) Link(ds DatasetSchemaID) { + if l == nil { + return + } + ds2 := ds + l.linkedDatasetSchema = &ds2 +} + +func (l *Group) Unlink() { + if l == nil { + return + } + l.linkedDatasetSchema = nil +} + +func (l *Group) Layers() *IDList { + if l == nil { + return nil + } + if l.layers == nil { + l.layers = NewIDList(nil) + } + return l.layers +} + +func (l *Group) MoveLayerFrom(id ID, index int, fromLayerGroup *Group) { + if l == nil { + return + } + + if fromLayerGroup == nil || fromLayerGroup.id == l.id { + l.layers.MoveLayer(id, index) + return + } + + fromLayerGroup.layers.RemoveLayer(id) + + if l.layers == nil { + l.layers = NewIDList(nil) + } + l.layers.AddLayer(id, index) +} + +func (l *Group) LayerRef() *Layer { + if l == nil { + return nil + } + var layer Layer = l + 
return &layer +} + +func (l *Group) IsRoot() bool { + if l == nil { + return false + } + return l.root +} + +func (l *Group) Properties() []PropertyID { + if l == nil { + return nil + } + return l.layerBase.Properties() +} + +func (l *Group) ValidateProperties(pm property.Map) error { + if l == nil { + return nil + } + return l.layerBase.ValidateProperties(pm) +} + +func (l *Group) Tags() *TagList { + if l == nil { + return nil + } + if l.layerBase.tags == nil { + l.layerBase.tags = NewTagList(nil) + } + return l.layerBase.tags +} diff --git a/server/pkg/layer/group_builder.go b/server/pkg/layer/group_builder.go new file mode 100644 index 000000000..230792c82 --- /dev/null +++ b/server/pkg/layer/group_builder.go @@ -0,0 +1,113 @@ +package layer + +func GroupFromLayer(l Layer) *Group { + li, ok := l.(*Group) + if !ok { + return nil + } + return li +} + +func GroupFromLayerRef(l *Layer) *Group { + if l == nil { + return nil + } + li, ok := (*l).(*Group) + if !ok { + return nil + } + return li +} + +type GroupBuilder struct { + l *Group +} + +func NewGroup() *GroupBuilder { + return &GroupBuilder{l: &Group{layerBase: layerBase{visible: true}}} +} + +func (b *GroupBuilder) Build() (*Group, error) { + if b.l.id.IsNil() { + return nil, ErrInvalidID + } + return b.l, nil +} + +func (b *GroupBuilder) MustBuild() *Group { + group, err := b.Build() + if err != nil { + panic(err) + } + return group +} + +func (b *GroupBuilder) base(layer layerBase) *GroupBuilder { + b.l.layerBase = layer + return b +} + +func (b *GroupBuilder) ID(id ID) *GroupBuilder { + b.l.id = id + return b +} + +func (b *GroupBuilder) NewID() *GroupBuilder { + b.l.id = NewID() + return b +} + +func (b *GroupBuilder) Scene(s SceneID) *GroupBuilder { + b.l.scene = s + return b +} + +func (b *GroupBuilder) Root(root bool) *GroupBuilder { + b.l.root = root + return b +} + +func (b *GroupBuilder) Name(name string) *GroupBuilder { + b.l.name = name + return b +} + +func (b *GroupBuilder) IsVisible(visible bool) 
*GroupBuilder { + b.l.visible = visible + return b +} + +func (b *GroupBuilder) Plugin(plugin *PluginID) *GroupBuilder { + b.l.plugin = plugin.CopyRef() + return b +} + +func (b *GroupBuilder) Extension(extension *PluginExtensionID) *GroupBuilder { + b.l.extension = extension.CloneRef() + return b +} + +func (b *GroupBuilder) Property(property *PropertyID) *GroupBuilder { + b.l.property = property.CopyRef() + return b +} + +func (b *GroupBuilder) Layers(ll *IDList) *GroupBuilder { + b.l.layers = ll + return b +} + +func (b *GroupBuilder) Infobox(infobox *Infobox) *GroupBuilder { + b.l.infobox = infobox + return b +} + +func (b *GroupBuilder) LinkedDatasetSchema(linkedDatasetSchema *DatasetSchemaID) *GroupBuilder { + b.l.linkedDatasetSchema = linkedDatasetSchema.CopyRef() + return b +} + +func (b *GroupBuilder) Tags(tags *TagList) *GroupBuilder { + b.l.tags = tags + return b +} diff --git a/server/pkg/layer/group_builder_test.go b/server/pkg/layer/group_builder_test.go new file mode 100644 index 000000000..cd03a2623 --- /dev/null +++ b/server/pkg/layer/group_builder_test.go @@ -0,0 +1,13 @@ +package layer + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGroupBuilder_Tags(t *testing.T) { + l := NewTagList(nil) + b := NewGroup().NewID().Tags(l).MustBuild() + assert.Same(t, l, b.Tags()) +} diff --git a/server/pkg/layer/group_test.go b/server/pkg/layer/group_test.go new file mode 100644 index 000000000..bad67b07e --- /dev/null +++ b/server/pkg/layer/group_test.go @@ -0,0 +1,137 @@ +package layer + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +var _ Layer = &Group{} + +var l1 = NewID() +var l2 = NewID() +var group = Group{ + layerBase: layerBase{ + id: NewID(), + name: "xxx", + visible: false, + plugin: MustPluginID("aaa~1.1.1").Ref(), + extension: PluginExtensionID("foo").Ref(), + property: nil, + infobox: nil, + tags: nil, + scene: SceneID{}, + }, + layers: &IDList{ + layers: append(make([]ID, 0), l1, l2), + 
layerIDs: map[ID]struct{}{l1: {}, l2: {}}, + }, + linkedDatasetSchema: nil, + root: true, +} + +func TestGroup_ID(t *testing.T) { + assert.NotNil(t, group.ID()) + assert.IsType(t, NewID(), group.ID()) +} + +func TestGroup_Name(t *testing.T) { + assert.Equal(t, "xxx", group.Name()) +} + +func TestGroup_Plugin(t *testing.T) { + assert.NotNil(t, group.Plugin()) + assert.True(t, MustPluginID("aaa~1.1.1").Equal(*group.Plugin())) +} + +func TestGroup_IDRef(t *testing.T) { + assert.NotNil(t, group.IDRef()) + assert.IsType(t, NewID(), group.ID()) +} + +func TestGroup_Extension(t *testing.T) { + assert.NotNil(t, group.Extension()) + assert.Equal(t, "foo", group.Extension().String()) +} + +func TestGroup_Infobox(t *testing.T) { + assert.Nil(t, group.Infobox()) +} + +func TestGroup_IsVisible(t *testing.T) { + assert.False(t, group.IsVisible()) +} + +func TestGroup_Property(t *testing.T) { + assert.Nil(t, group.Property()) +} + +func TestGroup_IsLinked(t *testing.T) { + assert.False(t, group.IsLinked()) +} + +func TestGroup_IsRoot(t *testing.T) { + assert.True(t, group.IsRoot()) +} + +func TestGroup_Rename(t *testing.T) { + group.Rename("fff") + assert.Equal(t, "fff", group.Name()) +} + +func TestGroup_SetInfobox(t *testing.T) { + inf := Infobox{ + property: NewPropertyID(), + fields: nil, + ids: nil, + } + group.SetInfobox(&inf) + assert.NotNil(t, group.Infobox()) +} + +func TestGroup_SetPlugin(t *testing.T) { + group.SetPlugin(MustPluginID("ccc~1.1.1").Ref()) + assert.NotNil(t, group.Plugin()) + assert.True(t, MustPluginID("ccc~1.1.1").Equal(*group.Plugin())) +} + +func TestGroup_SetVisible(t *testing.T) { + group.SetVisible(true) + assert.True(t, group.IsVisible()) +} + +func TestGroup_Properties(t *testing.T) { + assert.NotNil(t, group.Properties()) + assert.Equal(t, 1, len(group.Properties())) +} + +func TestGroup_UsesPlugin(t *testing.T) { + assert.True(t, group.UsesPlugin()) +} + +func TestGroup_LayerRef(t *testing.T) { + assert.NotNil(t, group.LayerRef()) +} + +func 
TestGroup_Layers(t *testing.T) { + assert.Equal(t, 2, len(group.Layers().Layers())) +} + +func TestGroup_LinkedDatasetSchema(t *testing.T) { + assert.Nil(t, group.LinkedDatasetSchema()) +} + +func TestGroup_Link(t *testing.T) { + group.Link(NewDatasetSchemaID()) + assert.NotNil(t, group.LinkedDatasetSchema()) +} + +func TestGroup_Unlink(t *testing.T) { + group.Unlink() + assert.Nil(t, group.LinkedDatasetSchema()) +} + +func TestGroup_MoveLayerFrom(t *testing.T) { + group.MoveLayerFrom(l1, 1, &group) + assert.Equal(t, l1, group.Layers().Layers()[1]) +} diff --git a/server/pkg/layer/id.go b/server/pkg/layer/id.go new file mode 100644 index 000000000..048c996b2 --- /dev/null +++ b/server/pkg/layer/id.go @@ -0,0 +1,70 @@ +package layer + +import ( + "sort" + + "github.com/reearth/reearth-backend/pkg/id" +) + +type ID = id.LayerID +type InfoboxFieldID = id.InfoboxFieldID +type TagID = id.TagID +type SceneID = id.SceneID +type PluginID = id.PluginID +type PluginExtensionID = id.PluginExtensionID +type PropertyID = id.PropertyID +type DatasetID = id.DatasetID +type DatasetSchemaID = id.DatasetSchemaID + +var NewID = id.NewLayerID +var NewInfoboxFieldID = id.NewInfoboxFieldID +var NewTagID = id.NewTagID +var NewSceneID = id.NewSceneID +var NewPropertyID = id.NewPropertyID +var NewPropertySchemaID = id.NewPropertySchemaID +var NewDatasetID = id.NewDatasetID +var NewDatasetSchemaID = id.NewDatasetSchemaID + +var MustID = id.MustLayerID +var MustInfoboxFieldID = id.MustInfoboxFieldID +var MustTagID = id.MustTagID +var MustSceneID = id.MustSceneID +var MustPluginID = id.MustPluginID +var MustPropertyID = id.MustPropertyID + +var IDFrom = id.LayerIDFrom +var InfoboxFieldIDFrom = id.InfoboxFieldIDFrom +var TagIDFrom = id.TagIDFrom +var SceneIDFrom = id.SceneIDFrom +var PropertyIDFrom = id.PropertyIDFrom +var DatasetIDFrom = id.DatasetIDFrom +var DatasetSchemaIDFrom = id.DatasetSchemaIDFrom + +var IDFromRef = id.LayerIDFromRef +var InfoboxFieldIDFromRef = id.InfoboxFieldIDFromRef 
+var TagIDFromRef = id.TagIDFromRef +var SceneIDFromRef = id.SceneIDFromRef +var PropertyIDFromRef = id.PropertyIDFromRef +var DatasetIDFromRef = id.DatasetIDFromRef +var DatasetSchemaIDFromRef = id.DatasetSchemaIDFromRef + +type IDSet = id.LayerIDSet +type InfoboxFIeldIDSet = id.InfoboxFieldIDSet +type DatasetIDSet = id.DatasetIDSet +type DatasetIDList = id.DatasetIDList +type TagIDSet = id.TagIDSet +type TagIDList = id.TagIDList + +var NewIDSet = id.NewLayerIDSet +var NewInfoboxFIeldIDSet = id.NewInfoboxFieldIDSet +var NewDatasetIDSet = id.NewDatasetIDSet +var NewTagIDSet = id.NewTagIDSet + +var OfficialPluginID = id.OfficialPluginID +var ErrInvalidID = id.ErrInvalidID + +func sortIDs(a []ID) { + sort.SliceStable(a, func(i, j int) bool { + return a[i].Compare(a[j]) < 0 + }) +} diff --git a/server/pkg/layer/id_list.go b/server/pkg/layer/id_list.go new file mode 100644 index 000000000..224745d80 --- /dev/null +++ b/server/pkg/layer/id_list.go @@ -0,0 +1,220 @@ +package layer + +import "github.com/reearth/reearth-backend/pkg/id" + +type IDList struct { + layers id.LayerIDList + // for checking duplication + layerIDs map[ID]struct{} +} + +func NewIDList(layers []ID) *IDList { + ll := IDList{} + if len(layers) == 0 { + return &ll + } + + ll.layers = append([]ID{}, layers...) + ll.layerIDs = make(map[ID]struct{}, len(layers)) + for _, l := range layers { + ll.layerIDs[l] = struct{}{} + } + return &ll +} + +func (l *IDList) Layers() []ID { + if l == nil { + return nil + } + result := append([]ID{}, l.layers...) 
+ return result +} + +func (l *IDList) HasLayer(id ID) bool { + if l == nil || len(l.layerIDs) == 0 { + return false + } + _, ok := l.layerIDs[id] + return ok +} + +func (l *IDList) LayerAt(index int) ID { + if l == nil || index < 0 || len(l.layers) <= index { + return ID{} + } + return l.layers[index] +} + +func (l *IDList) AtRef(index int) *ID { + if l == nil || index < 0 || len(l.layers) <= index { + return nil + } + return &l.layers[index] +} + +func (l *IDList) FindLayerIndex(id ID) int { + if l == nil { + return -1 + } + for i, l := range l.layers { + if l == id { + return i + } + } + return -1 +} + +func (l *IDList) LayerCount() int { + if l == nil { + return 0 + } + return len(l.layers) +} + +func (l *IDList) AddLayer(lid ID, index int) { + if l == nil || l.HasLayer(lid) { + return + } + if l.layerIDs == nil { + l.layerIDs = make(map[ID]struct{}) + } + + l.layerIDs[lid] = struct{}{} + + le := len(l.layers) + if index < 0 || le <= index { + l.layers = append(l.layers, lid) + } else { + l.layers = append(l.layers[:index], append([]ID{lid}, l.layers[index:]...)...) + } +} + +func (l *IDList) AppendLayers(lid ...ID) *IDList { + if l == nil { + return NewIDList(lid) + } + for _, i := range lid { + l.AddLayer(i, -1) + } + return l +} + +func (l *IDList) Merge(l2 *IDList) { + l.AppendLayers(l2.layers...) +} + +func (l *IDList) Clone() (l2 *IDList) { + if l == nil { + return l2 + } + return NewIDList(l.layers) +} + +func (l *IDList) AddOrMoveLayer(lid ID, index int) { + if l == nil { + return + } + + le := len(l.layers) + if index < 0 || le <= index { + index = le + } + + if l.HasLayer(lid) { + l.MoveLayer(lid, index) + return + } + l.layers = append(l.layers[:index], append([]ID{lid}, l.layers[index:]...)...) 
+ l.layerIDs[lid] = struct{}{} +} + +func (l *IDList) MoveLayer(id ID, toIndex int) { + if l == nil { + return + } + + for fromIndex, layer := range l.layers { + if layer == id { + l.MoveLayerAt(fromIndex, toIndex) + return + } + } +} + +func (l *IDList) MoveLayerAt(fromIndex int, toIndex int) { + if l == nil || len(l.layers) == 0 { + return + } + + le := len(l.layers) + if fromIndex < 0 || le <= fromIndex { + return + } + if toIndex < 0 || le <= toIndex { + toIndex = le - 1 + } + if fromIndex == toIndex { + return + } + + f := l.layers[fromIndex] + l.layers = append(l.layers[:fromIndex], l.layers[fromIndex+1:]...) + newSlice := make([]ID, toIndex+1) + copy(newSlice, l.layers[:toIndex]) + newSlice[toIndex] = f + l.layers = append(newSlice, l.layers[toIndex:]...) +} + +func (l *IDList) RemoveLayer(ids ...ID) { + if l == nil { + return + } + + for i := 0; i < len(l.layers); i++ { + layer := l.layers[i] + for _, id := range ids { + if layer == id { + l.RemoveLayerAt(i) + i-- + break + } + } + } +} + +func (l *IDList) RemoveLayerAt(index int) { + if l == nil || len(l.layers) == 0 { + return + } + + le := len(l.layers) + if index < 0 || le <= index { + return + } + + layer := l.layers[index] + var layers []ID + if index == le { + layers = []ID{} + } else { + layers = l.layers[index+1:] + } + l.layers = append(l.layers[:index], layers...) 
+ delete(l.layerIDs, layer) +} + +func (l *IDList) Empty() { + if l == nil { + return + } + + l.layers = nil + l.layerIDs = nil +} + +func (l *IDList) Strings() []string { + if l == nil { + return nil + } + return l.layers.Strings() +} diff --git a/server/pkg/layer/id_list_test.go b/server/pkg/layer/id_list_test.go new file mode 100644 index 000000000..b2981981b --- /dev/null +++ b/server/pkg/layer/id_list_test.go @@ -0,0 +1,138 @@ +package layer + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestLayerIDList(t *testing.T) { + l1 := NewID() + l2 := NewID() + l3 := NewID() + l4 := NewID() + rawLayers := []ID{l1, l3} + layers := NewIDList(rawLayers) + + assert.NotNil(t, layers) + + // 1, 3 + + assert.Equal(t, rawLayers, layers.Layers()) + assert.Equal(t, 2, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l3, layers.LayerAt(1)) + assert.True(t, layers.HasLayer(l1)) + assert.False(t, layers.HasLayer(l2)) + assert.True(t, layers.HasLayer(l3)) + assert.False(t, layers.HasLayer(l4)) + + // 1, 2, 3 + + layers.AddLayer(l2, 1) + assert.Equal(t, 3, layers.LayerCount()) + assert.True(t, layers.HasLayer(l2)) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l2, layers.LayerAt(1)) + assert.Equal(t, l3, layers.LayerAt(2)) + + // 1, 2, 3 (ignored) + + layers.AddLayer(l1, 2) + assert.Equal(t, 3, layers.LayerCount()) + assert.True(t, layers.HasLayer(l2)) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l2, layers.LayerAt(1)) + assert.Equal(t, l3, layers.LayerAt(2)) + + // 1, 2, 3, 4 + + layers.AddLayer(l4, 10) + assert.Equal(t, 4, layers.LayerCount()) + assert.True(t, layers.HasLayer(l4)) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l2, layers.LayerAt(1)) + assert.Equal(t, l3, layers.LayerAt(2)) + assert.Equal(t, l4, layers.LayerAt(3)) + + // 3, 1, 2, 4 + + layers.MoveLayer(l3, 0) + assert.Equal(t, 4, layers.LayerCount()) + assert.Equal(t, l3, layers.LayerAt(0)) + assert.Equal(t, l1, 
layers.LayerAt(1)) + assert.Equal(t, l2, layers.LayerAt(2)) + assert.Equal(t, l4, layers.LayerAt(3)) + + // 1, 2, 4, 3 + + layers.MoveLayer(l3, 3) + assert.Equal(t, 4, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l2, layers.LayerAt(1)) + assert.Equal(t, l4, layers.LayerAt(2)) + assert.Equal(t, l3, layers.LayerAt(3)) + + // 1, 2, 3, 4 + + layers.MoveLayer(l4, 4) + assert.Equal(t, 4, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l2, layers.LayerAt(1)) + assert.Equal(t, l3, layers.LayerAt(2)) + assert.Equal(t, l4, layers.LayerAt(3)) + + // 1, 2, 3, 4 + + layers.MoveLayer(l4, 10) + assert.Equal(t, 4, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l2, layers.LayerAt(1)) + assert.Equal(t, l3, layers.LayerAt(2)) + assert.Equal(t, l4, layers.LayerAt(3)) + + // 1, 2, 3, 4 + + layers.MoveLayer(l4, -1) + assert.Equal(t, 4, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l2, layers.LayerAt(1)) + assert.Equal(t, l3, layers.LayerAt(2)) + assert.Equal(t, l4, layers.LayerAt(3)) + + // 1, 3, 4 + + layers.RemoveLayer(l2) + assert.Equal(t, 3, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l3, layers.LayerAt(1)) + assert.Equal(t, l4, layers.LayerAt(2)) + assert.False(t, layers.HasLayer(l2)) + + // 1, 3, 4, 2 + + layers.AddOrMoveLayer(l2, 3) + assert.Equal(t, 4, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l3, layers.LayerAt(1)) + assert.Equal(t, l4, layers.LayerAt(2)) + assert.Equal(t, l2, layers.LayerAt(3)) + assert.True(t, layers.HasLayer(l2)) + + // 1, 2, 3, 4 + + layers.AddOrMoveLayer(l2, 1) + assert.Equal(t, 4, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l2, layers.LayerAt(1)) + assert.Equal(t, l3, layers.LayerAt(2)) + assert.Equal(t, l4, layers.LayerAt(3)) + assert.True(t, layers.HasLayer(l2)) + + // 1, 3 + + layers.RemoveLayer(l2, l4) + 
assert.Equal(t, 2, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l3, layers.LayerAt(1)) +} diff --git a/server/pkg/layer/infobox.go b/server/pkg/layer/infobox.go new file mode 100644 index 000000000..26ca009cb --- /dev/null +++ b/server/pkg/layer/infobox.go @@ -0,0 +1,191 @@ +package layer + +import ( + "errors" + "fmt" + + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Infobox struct { + property PropertyID + fields []*InfoboxField + // for checking duplication + ids map[InfoboxFieldID]struct{} +} + +func NewInfobox(fields []*InfoboxField, p PropertyID) *Infobox { + infobox := Infobox{ + property: p, + fields: make([]*InfoboxField, len(fields)), + ids: make(map[InfoboxFieldID]struct{}, len(fields)), + } + for i, f := range fields { + if f == nil { + continue + } + infobox.fields[i] = f + infobox.ids[f.ID()] = struct{}{} + } + return &infobox +} + +func (i *Infobox) Property() PropertyID { + return i.property +} + +func (i *Infobox) PropertyRef() *PropertyID { + if i == nil { + return nil + } + pid := i.property + return &pid +} + +func (i *Infobox) Fields() []*InfoboxField { + if i == nil { + return nil + } + return append([]*InfoboxField{}, i.fields...) 
+} + +func (i *Infobox) Field(field InfoboxFieldID) *InfoboxField { + for _, f := range i.fields { + if f.ID() == field { + return f + } + } + return nil +} + +func (i *Infobox) FieldAt(index int) *InfoboxField { + if i == nil || index < 0 || len(i.fields) <= index { + return nil + } + return i.fields[index] +} + +func (i *Infobox) FieldsByPlugin(pid PluginID, eid *PluginExtensionID) []*InfoboxField { + if i == nil { + return nil + } + fields := make([]*InfoboxField, 0, len(i.fields)) + for _, f := range i.fields { + if f.Plugin().Equal(pid) && (eid == nil || f.Extension() == *eid) { + fields = append(fields, f) + } + } + return fields +} + +func (i *Infobox) Has(id InfoboxFieldID) bool { + _, ok := i.ids[id] + return ok +} + +func (i *Infobox) Count() int { + return len(i.fields) +} + +func (i *Infobox) Add(field *InfoboxField, index int) { + l := len(i.fields) + if index < 0 || l <= index { + index = l + } + + id := field.ID() + if i.Has(id) { + return + } + i.fields = append(i.fields[:index], append([]*InfoboxField{field}, i.fields[index:]...)...) + i.ids[id] = struct{}{} +} + +func (i *Infobox) Move(field InfoboxFieldID, toIndex int) { + for fromIndex, f := range i.fields { + if f.ID() == field { + i.MoveAt(fromIndex, toIndex) + return + } + } +} + +func (i *Infobox) MoveAt(fromIndex int, toIndex int) { + l := len(i.fields) + if fromIndex < 0 || l <= fromIndex { + return + } + if toIndex < 0 || l <= toIndex { + toIndex = l - 1 + } + f := i.fields[fromIndex] + + i.fields = append(i.fields[:fromIndex], i.fields[fromIndex+1:]...) + newSlice := make([]*InfoboxField, toIndex+1) + copy(newSlice, i.fields[:toIndex]) + newSlice[toIndex] = f + i.fields = append(newSlice, i.fields[toIndex:]...) 
+} + +func (i *Infobox) Remove(field InfoboxFieldID) { + for index, f := range i.fields { + if f.ID() == field { + i.RemoveAt(index) + return + } + } +} + +func (i *Infobox) RemoveAllByPlugin(pid PluginID, eid *PluginExtensionID) []PropertyID { + if i == nil { + return nil + } + + var properties []PropertyID + for j := 0; j < len(i.fields); j++ { + if i.fields[j].plugin.Equal(pid) && (eid == nil || i.fields[j].Extension() == *eid) { + properties = append(properties, i.fields[j].Property()) + i.fields = append(i.fields[:j], i.fields[j+1:]...) + j-- + } + } + return properties +} + +func (i *Infobox) RemoveAt(index int) { + l := len(i.fields) + if index < 0 || l <= index { + index = l + } + + f := i.fields[index] + if index == l { + i.fields = i.fields[:index] + } else { + i.fields = append(i.fields[:index], i.fields[index+1:]...) + } + delete(i.ids, f.ID()) +} + +func (i *Infobox) ValidateProperties(pm property.Map) error { + if i == nil || pm == nil { + return nil + } + + lp := pm[i.property] + if lp == nil { + return errors.New("property does not exist") + } + if !lp.Schema().Equal(builtin.PropertySchemaIDInfobox) { + return errors.New("property has a invalid schema") + } + + for i, f := range i.fields { + if err := f.ValidateProperty(pm); err != nil { + return fmt.Errorf("field[%d](%s): %w", i, f.ID(), err) + } + } + + return nil +} diff --git a/server/pkg/layer/infobox_field.go b/server/pkg/layer/infobox_field.go new file mode 100644 index 000000000..3c363e0e8 --- /dev/null +++ b/server/pkg/layer/infobox_field.go @@ -0,0 +1,55 @@ +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/idgen --name InfoboxField --output ../id + +package layer + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/property" +) + +type InfoboxField struct { + id InfoboxFieldID + plugin PluginID + extension PluginExtensionID + property PropertyID +} + +func (i *InfoboxField) ID() InfoboxFieldID { + return i.id +} + +func (i *InfoboxField) Plugin() PluginID { + 
return i.plugin +} + +func (i *InfoboxField) Extension() PluginExtensionID { + return i.extension +} + +func (i *InfoboxField) Property() PropertyID { + return i.property +} + +func (i *InfoboxField) PropertyRef() *PropertyID { + if i == nil { + return nil + } + return i.property.Ref() +} + +func (i *InfoboxField) ValidateProperty(pm property.Map) error { + if i == nil || pm == nil { + return nil + } + + lp := pm[i.property] + if lp == nil { + return errors.New("property does not exist") + } + if !lp.Schema().Equal(NewPropertySchemaID(i.plugin, i.extension.String())) { + return errors.New("property has a invalid schema") + } + + return nil +} diff --git a/server/pkg/layer/infobox_field_builder.go b/server/pkg/layer/infobox_field_builder.go new file mode 100644 index 000000000..47ce14ff0 --- /dev/null +++ b/server/pkg/layer/infobox_field_builder.go @@ -0,0 +1,51 @@ +package layer + +type InfoboxFieldBuilder struct { + i *InfoboxField +} + +func NewInfoboxField() *InfoboxFieldBuilder { + return &InfoboxFieldBuilder{i: &InfoboxField{}} +} + +func (b *InfoboxFieldBuilder) Build() (*InfoboxField, error) { + if b.i.id.IsNil() || + string(b.i.extension) == "" || + b.i.property.IsNil() { + return nil, ErrInvalidID + } + return b.i, nil +} + +func (b *InfoboxFieldBuilder) MustBuild() *InfoboxField { + i, err := b.Build() + if err != nil { + panic(err) + } + return i +} + +func (b *InfoboxFieldBuilder) ID(id InfoboxFieldID) *InfoboxFieldBuilder { + b.i.id = id + return b +} + +func (b *InfoboxFieldBuilder) NewID() *InfoboxFieldBuilder { + b.i.id = NewInfoboxFieldID() + return b +} + +func (b *InfoboxFieldBuilder) Plugin(plugin PluginID) *InfoboxFieldBuilder { + b.i.plugin = plugin + return b +} + +func (b *InfoboxFieldBuilder) Extension(extension PluginExtensionID) *InfoboxFieldBuilder { + b.i.extension = extension + return b +} + +func (b *InfoboxFieldBuilder) Property(p PropertyID) *InfoboxFieldBuilder { + b.i.property = p + return b +} diff --git 
a/server/pkg/layer/infobox_test.go b/server/pkg/layer/infobox_test.go new file mode 100644 index 000000000..ed9626725 --- /dev/null +++ b/server/pkg/layer/infobox_test.go @@ -0,0 +1,85 @@ +package layer + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestInfobox(t *testing.T) { + f1 := &InfoboxField{id: NewInfoboxFieldID()} + f2 := &InfoboxField{id: NewInfoboxFieldID()} + f3 := &InfoboxField{id: NewInfoboxFieldID()} + f4 := &InfoboxField{id: NewInfoboxFieldID()} + fields := []*InfoboxField{f1, f2, f3} + infobox := NewInfobox(fields, NewPropertyID()) + + assert.NotNil(t, infobox) + assert.Equal(t, fields, infobox.Fields()) + assert.Equal(t, f1, infobox.Field(f1.ID())) + assert.Equal(t, f3, infobox.FieldAt(2)) + assert.Equal(t, 3, infobox.Count()) + assert.True(t, infobox.Has(f1.ID())) + assert.False(t, infobox.Has(f4.ID())) + + infobox.Add(f4, 3) + assert.True(t, infobox.Has(f4.ID())) + assert.Equal(t, 4, infobox.Count()) + assert.Equal(t, f1, infobox.FieldAt(0)) + assert.Equal(t, f2, infobox.FieldAt(1)) + assert.Equal(t, f3, infobox.FieldAt(2)) + assert.Equal(t, f4, infobox.FieldAt(3)) + + infobox.Move(f4.ID(), 2) + assert.Equal(t, f1, infobox.FieldAt(0)) + assert.Equal(t, f2, infobox.FieldAt(1)) + assert.Equal(t, f4, infobox.FieldAt(2)) + assert.Equal(t, f3, infobox.FieldAt(3)) + + infobox.Remove(f2.ID()) + assert.Equal(t, 3, infobox.Count()) + assert.False(t, infobox.Has(f2.ID())) + assert.Nil(t, infobox.Field(f2.ID())) + assert.Equal(t, f1, infobox.FieldAt(0)) + assert.Equal(t, f4, infobox.FieldAt(1)) + assert.Equal(t, f3, infobox.FieldAt(2)) + + infobox.Move(f4.ID(), 2) + assert.Equal(t, f1, infobox.FieldAt(0)) + assert.Equal(t, f3, infobox.FieldAt(1)) + assert.Equal(t, f4, infobox.FieldAt(2)) +} + +func TestInfobox_FieldsByPlugin(t *testing.T) { + pid1 := MustPluginID("xxx~1.1.1") + pid2 := MustPluginID("xxy~1.1.1") + f1 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid1, extension: "a", property: NewPropertyID()} + f2 := 
&InfoboxField{id: NewInfoboxFieldID(), plugin: pid2, extension: "b", property: NewPropertyID()} + f3 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid1, extension: "c", property: NewPropertyID()} + f4 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid2, extension: "d", property: NewPropertyID()} + infobox := NewInfobox([]*InfoboxField{f1, f2, f3, f4}, NewPropertyID()) + + assert.Equal(t, []*InfoboxField(nil), (*Infobox)(nil).FieldsByPlugin(pid1, nil)) + assert.Equal(t, []*InfoboxField{f1, f3}, infobox.FieldsByPlugin(pid1, nil)) + assert.Equal(t, []*InfoboxField{f2, f4}, infobox.FieldsByPlugin(pid2, nil)) + assert.Equal(t, []*InfoboxField{f2}, infobox.FieldsByPlugin(pid2, PluginExtensionID("b").Ref())) +} + +func TestInfobox_RemoveAllByPlugin(t *testing.T) { + pid1 := MustPluginID("xxx~1.1.1") + pid2 := MustPluginID("xxy~1.1.1") + f1 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid1, extension: "a", property: NewPropertyID()} + f2 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid2, extension: "b", property: NewPropertyID()} + f3 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid1, extension: "c", property: NewPropertyID()} + f4 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid2, extension: "d", property: NewPropertyID()} + infobox := NewInfobox([]*InfoboxField{f1, f2, f3, f4}, NewPropertyID()) + + assert.Equal(t, []PropertyID(nil), (*Infobox)(nil).RemoveAllByPlugin(pid1, nil)) + assert.Equal(t, []*InfoboxField{f1, f2, f3, f4}, infobox.fields) + assert.Equal(t, []PropertyID{f1.Property(), f3.Property()}, infobox.RemoveAllByPlugin(pid1, nil)) + assert.Equal(t, []*InfoboxField{f2, f4}, infobox.fields) + assert.Equal(t, []PropertyID(nil), infobox.RemoveAllByPlugin(pid1, nil)) + assert.Equal(t, []*InfoboxField{f2, f4}, infobox.fields) + assert.Equal(t, []PropertyID{f4.Property()}, infobox.RemoveAllByPlugin(pid2, PluginExtensionID("d").Ref())) + assert.Equal(t, []*InfoboxField{f2}, infobox.fields) +} diff --git a/server/pkg/layer/initializer.go 
package layer

import (
	"errors"
	"fmt"

	"github.com/reearth/reearth-backend/pkg/builtin"
	"github.com/reearth/reearth-backend/pkg/property"
	"github.com/reearth/reearth-backend/pkg/rerror"
)

// Sentinel errors marking which part of layer initialization failed; the
// *With variants wrap a concrete cause under the corresponding sentinel.
var (
	ErrInitializationInfobox      = errors.New("infobox")
	ErrInitializationInfoboxWith  = rerror.With(ErrInitializationInfobox)
	ErrInitializationProperty     = errors.New("property")
	ErrInitializationPropertyWith = rerror.With(ErrInitializationProperty)
)

// InitializerResult is the outcome of Initializer.Layer: the ID of the root
// layer that was built plus maps of every layer and property created along
// the way (including those of recursively built children).
type InitializerResult struct {
	Root       ID
	Layers     Map
	Properties property.Map
}

// RootLayer returns the built root layer, or nil if it is absent from Layers.
func (r InitializerResult) RootLayer() Layer {
	return r.Layers.Layer(r.Root)
}

// RootLayerRef returns a reference to the built root layer.
func (r InitializerResult) RootLayerRef() *Layer {
	return r.Layers[r.Root]
}

// RootLayerGroup returns the root layer as a *Group, or nil if it is not a group.
func (r InitializerResult) RootLayerGroup() *Group {
	return r.Layers.Group(r.Root)
}

// RootLayerItem returns the root layer as an *Item, or nil if it is not an item.
func (r InitializerResult) RootLayerItem() *Item {
	return r.Layers.Item(r.Root)
}

// Initializer is a JSON-serializable description from which a layer — and,
// recursively, its child layers, infobox, and properties — can be built.
// Optional pointer fields that are nil are either skipped or auto-generated
// (see Layer for the exact rules).
type Initializer struct {
	ID                  *ID                   `json:"id"`
	Plugin              *PluginID             `json:"plugin"`
	Extension           *PluginExtensionID    `json:"extension"`
	Name                string                `json:"name"`
	Infobox             *InitializerInfobox   `json:"infobox"`
	PropertyID          *PropertyID           `json:"propertyId"`
	Property            *property.Initializer `json:"property"`
	Layers              []*Initializer        `json:"layers"`
	LayerIDs            []ID                  `json:"layerIds"`
	IsVisible           *bool                 `json:"isVisible"`
	LinkedDatasetSchema *DatasetSchemaID      `json:"linkedDatasetSchema"`
	LinkedDataset       *DatasetID            `json:"linkedDataset"`
}

// Clone returns a deep copy of the initializer. A nil receiver yields nil.
func (i *Initializer) Clone() *Initializer {
	if i == nil {
		return nil
	}

	// Copy the bool by value so the clone does not alias the original.
	var isVisible *bool
	if i.IsVisible != nil {
		isVisible2 := *i.IsVisible
		isVisible = &isVisible2
	}

	// Recursively clone children; preserve the nil-vs-empty distinction,
	// since Layer() branches on Layers being nil.
	var layers []*Initializer
	if i.Layers != nil {
		layers = make([]*Initializer, 0, len(i.Layers))
		for _, l := range i.Layers {
			layers = append(layers, l.Clone())
		}
	}

	var layerIDs []ID
	if len(i.LayerIDs) > 0 {
		layerIDs = append([]ID{}, i.LayerIDs...)
	}

	return &Initializer{
		ID:                  i.ID.CopyRef(),
		Plugin:              i.Plugin.CopyRef(),
		Extension:           i.Extension.CloneRef(),
		Name:                i.Name,
		Infobox:             i.Infobox.Clone(),
		PropertyID:          i.PropertyID.CopyRef(),
		Property:            i.Property.Clone(),
		Layers:              layers,
		LayerIDs:            layerIDs,
		IsVisible:           isVisible,
		LinkedDatasetSchema: i.LinkedDatasetSchema.CopyRef(),
		LinkedDataset:       i.LinkedDataset.CopyRef(),
	}
}

// Layer builds the layer described by the initializer for scene sid,
// recursively building child layers, the infobox, and properties.
//
// Shape selection: a non-nil Layers slice (even empty) yields a Group built
// from the recursively initialized children; otherwise a non-nil LayerIDs
// yields a Group referencing those IDs as-is; otherwise an Item is built.
// A nil ID is replaced with a freshly generated one. A nil receiver returns
// a zero result with no error.
func (i *Initializer) Layer(sid SceneID) (r InitializerResult, err error) {
	if i == nil {
		return
	}

	ib, pm, err2 := i.Infobox.Infobox(sid)
	if err2 != nil {
		err = ErrInitializationInfoboxWith(err2)
		return
	}
	r.Properties = r.Properties.Merge(pm)

	// Generate a new layer id when none was supplied.
	lid := i.ID
	if i.ID == nil {
		lid = NewID().Ref()
	}

	// An explicitly built property (i.Property) takes precedence over the
	// plain PropertyID reference and is recorded in the result map.
	pid := i.PropertyID
	lp, err2 := i.Property.Property(sid)
	if err2 != nil {
		err = ErrInitializationPropertyWith(err2)
		return
	}
	if lp != nil {
		pid = lp.IDRef()
		r.Properties = r.Properties.Add(lp)
	}

	lay := New().
		ID(*lid).
		Plugin(i.Plugin).
		Extension(i.Extension).
		Infobox(ib).
		Scene(sid).
		Property(pid).
		Name(i.Name).
		IsVisibleRef(i.IsVisible)

	var l Layer
	if i.Layers != nil {
		layers := NewIDList(nil)

		// NOTE: the loop index i shadows the receiver here; it is used only
		// as the positional label attached to a child's error.
		for i, lay2 := range i.Layers {
			r2, err2 := lay2.Layer(sid)
			if err2 != nil {
				err = rerror.From(fmt.Sprint(i), err2)
				return
			}
			// Children that produced no root layer (nil initializers) are
			// silently skipped rather than treated as errors.
			if rootLayer := r2.RootLayer(); rootLayer != nil {
				layers = layers.AppendLayers(rootLayer.ID())
				r.Layers = r.Layers.Merge(r2.Layers)
				r.Properties = r.Properties.Merge(r2.Properties)
			}
		}

		l, err = lay.Group().LinkedDatasetSchema(i.LinkedDatasetSchema).Layers(layers).Build()
	} else if i.LayerIDs != nil {
		l, err = lay.Group().LinkedDatasetSchema(i.LinkedDatasetSchema).Layers(NewIDList(i.LayerIDs)).Build()
	} else {
		l, err = lay.Item().LinkedDataset(i.LinkedDataset).Build()
	}

	if err != nil {
		err = fmt.Errorf("failed to initialize layer: %w", err)
		return
	}

	r.Layers = r.Layers.Add(&l)
	r.Root = l.ID()
	return
}

// MustBeLayer is like Layer but panics when initialization fails.
func (i *Initializer) MustBeLayer(sid SceneID) InitializerResult {
	r, err := i.Layer(sid)
	if err != nil {
		panic(err)
	}
	return r
}

// InitializerInfobox describes an infobox to be created for a layer.
// Either PropertyID (a reference) or Property (a property to build) supplies
// the infobox property; one of the two must resolve to a non-nil id.
type InitializerInfobox struct {
	PropertyID *PropertyID                `json:"propertyId"`
	Property   *property.Initializer      `json:"property"`
	Fields     []*InitializerInfoboxField `json:"fields"`
}

// Clone returns a deep copy of the infobox initializer. Nil yields nil.
func (i *InitializerInfobox) Clone() *InitializerInfobox {
	if i == nil {
		return nil
	}

	var fields []*InitializerInfoboxField
	if i.Fields != nil {
		fields = make([]*InitializerInfoboxField, 0, len(i.Fields))
		for _, f := range i.Fields {
			fields = append(fields, f.Clone())
		}
	}

	return &InitializerInfobox{
		PropertyID: i.PropertyID.CopyRef(),
		Property:   i.Property.Clone(),
		Fields:     fields,
	}
}

// Infobox builds the infobox and all of its fields for the given scene,
// returning the infobox together with every property created along the way.
// A nil receiver returns (nil, nil, nil).
func (i *InitializerInfobox) Infobox(scene SceneID) (*Infobox, property.Map, error) {
	if i == nil {
		return nil, nil, nil
	}

	pm := property.Map{}
	var fields []*InfoboxField
	if i.Fields != nil {
		fields = make([]*InfoboxField, 0, len(i.Fields))
		// The loop index i shadows the receiver; it labels field errors.
		for i, f := range i.Fields {
			ibf, ibfp, err := f.InfoboxField(scene)
			if err != nil {
				return nil, nil, rerror.From(fmt.Sprint(i), err)
			}
			fields = append(fields, ibf)
			pm = pm.Add(ibfp)
		}
	}

	// Infobox property: use the referenced id when given; otherwise build
	// one from the initializer against the builtin infobox schema.
	var ibp *property.Property
	ibpid := i.PropertyID
	if ibpid == nil {
		var err error
		ibp, err = i.Property.PropertyIncludingEmpty(scene, builtin.PropertySchemaIDInfobox)
		if err != nil {
			return nil, nil, ErrInitializationPropertyWith(err)
		}
		if ibp != nil {
			ibpid = ibp.IDRef()
			pm = pm.Add(ibp)
		}
	}
	if ibpid == nil {
		return nil, nil, errors.New("infobox property id is empty")
	}

	return NewInfobox(fields, *ibpid), pm, nil
}

// InitializerInfoboxField describes a single infobox field to be created.
// Plugin and Extension are required (non-pointer); the property comes from
// PropertyID or is built from Property against the extension's schema.
type InitializerInfoboxField struct {
	ID         *InfoboxFieldID       `json:"id"`
	Plugin     PluginID              `json:"plugin"`
	Extension  PluginExtensionID     `json:"extension"`
	PropertyID *PropertyID           `json:"propertyId"`
	Property   *property.Initializer `json:"property"`
}

// Clone returns a deep copy of the field initializer. Nil yields nil.
func (i *InitializerInfoboxField) Clone() *InitializerInfoboxField {
	if i == nil {
		return nil
	}

	return &InitializerInfoboxField{
		ID:         i.ID.CopyRef(),
		Plugin:     i.Plugin,
		Extension:  i.Extension,
		PropertyID: i.PropertyID.CopyRef(),
		Property:   i.Property.Clone(),
	}
}

// InfoboxField builds the infobox field and, when no PropertyID was given,
// builds its property too. The returned *property.Property is non-nil only
// when a property was actually created here. A nil receiver returns
// (nil, nil, nil).
func (i *InitializerInfoboxField) InfoboxField(scene SceneID) (*InfoboxField, *property.Property, error) {
	if i == nil {
		return nil, nil, nil
	}

	// Schema id is derived from the plugin and extension of the field.
	psid := NewPropertySchemaID(i.Plugin, i.Extension.String())

	// Generate a new field id when none was supplied.
	fid := i.ID
	if i.ID == nil {
		fid = NewInfoboxFieldID().Ref()
	}

	pid := i.PropertyID
	var p *property.Property
	if pid == nil {
		p2, err := i.Property.PropertyIncludingEmpty(scene, psid)
		if err != nil {
			return nil, nil, ErrInitializationPropertyWith(err)
		}
		if p2 != nil {
			p = p2
			pid = p2.IDRef()
		}
	}
	if pid == nil {
		return nil, nil, errors.New("infobox field property id is empty")
	}

	f, err := NewInfoboxField().ID(*fid).Plugin(i.Plugin).Extension(i.Extension).Property(*pid).Build()
	if err != nil {
		return nil, nil, err
	}
	return f, p, nil
}
b/server/pkg/layer/initializer_test.go new file mode 100644 index 000000000..3f7493b3d --- /dev/null +++ b/server/pkg/layer/initializer_test.go @@ -0,0 +1,187 @@ +package layer + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +func TestInitializer_Clone(t *testing.T) { + isVisible := false + i := &Initializer{ + ID: NewID().Ref(), + Plugin: MustPluginID("reearth").Ref(), + Extension: PluginExtensionID("marker").Ref(), + Name: "hoge", + Infobox: &InitializerInfobox{}, + PropertyID: NewPropertyID().Ref(), + Property: &property.Initializer{ + ID: NewPropertyID().Ref(), + }, + Layers: []*Initializer{{}}, + IsVisible: &isVisible, + LinkedDatasetSchema: NewDatasetSchemaID().Ref(), + LinkedDataset: NewDatasetID().Ref(), + } + + actual := i.Clone() + + assert.NotSame(t, i, actual) + assert.NotSame(t, i.ID, actual.ID) + assert.NotSame(t, i.Plugin, actual.Plugin) + assert.NotSame(t, i.Extension, actual.Extension) + assert.NotSame(t, i.Infobox, actual.Infobox) + assert.NotSame(t, i.PropertyID, actual.PropertyID) + assert.NotSame(t, i.Property, actual.Property) + assert.NotSame(t, i.Layers, actual.Layers) + assert.NotSame(t, i.Layers[0], actual.Layers[0]) + assert.NotSame(t, i.IsVisible, actual.IsVisible) + assert.NotSame(t, i.LinkedDatasetSchema, actual.LinkedDatasetSchema) + assert.NotSame(t, i.LinkedDataset, actual.LinkedDataset) + assert.Equal(t, i, actual) +} + +func TestInitializer_Layer(t *testing.T) { + sid := NewSceneID() + isVisible := false + i := &Initializer{ + ID: NewID().Ref(), + Plugin: MustPluginID("reearth").Ref(), + Extension: PluginExtensionID("marker").Ref(), + Name: "hoge", + Infobox: &InitializerInfobox{ + PropertyID: NewPropertyID().Ref(), + }, + PropertyID: NewPropertyID().Ref(), + IsVisible: &isVisible, + LinkedDatasetSchema: NewDatasetSchemaID().Ref(), + LinkedDataset: NewDatasetID().Ref(), + Layers: []*Initializer{{ + ID: NewID().Ref(), + Layers: []*Initializer{{ + ID: 
NewID().Ref(), + }}, + }}, + } + + expected1 := New(). + ID(*i.ID). + Scene(sid). + Plugin(i.Plugin). + Extension(i.Extension). + Name(i.Name). + IsVisibleRef(i.IsVisible). + Infobox(NewInfobox(nil, *i.Infobox.PropertyID)). + Property(i.PropertyID). + Group(). + Layers(NewIDList([]ID{*i.Layers[0].ID})). + LinkedDatasetSchema(i.LinkedDatasetSchema). + MustBuild() + expected2 := New().ID(*i.Layers[0].ID).Scene(sid).Group().Layers(NewIDList([]ID{*i.Layers[0].Layers[0].ID})).MustBuild() + expected3 := New().ID(*i.Layers[0].Layers[0].ID).Scene(sid).Item().MustBuild() + + actual, err := i.Layer(sid) + assert.NoError(t, err) + assert.Equal(t, Map{ + expected1.ID(): expected1.LayerRef(), + expected2.ID(): expected2.LayerRef(), + expected3.ID(): expected3.LayerRef(), + }, actual.Layers) + + // check if a new id is generated + i.ID = nil + actual, err = i.Layer(sid) + assert.NoError(t, err) + assert.False(t, actual.RootLayer().ID().IsEmpty()) +} + +func TestInitializerInfobox_Clone(t *testing.T) { + i := &InitializerInfobox{ + PropertyID: NewPropertyID().Ref(), + Property: &property.Initializer{ + ID: NewPropertyID().Ref(), + }, + Fields: []*InitializerInfoboxField{{ + ID: NewInfoboxFieldID().Ref(), + Plugin: MustPluginID("reearth"), + Extension: PluginExtensionID("marker"), + PropertyID: NewPropertyID().Ref(), + }}, + } + + actual := i.Clone() + + assert.NotSame(t, i, actual) + assert.NotSame(t, i.Property, actual.Property) + assert.NotSame(t, i.Fields, actual.Fields) + assert.NotSame(t, i.Fields[0], actual.Fields[0]) + assert.Equal(t, i, actual) +} + +func TestInitializerInfobox_Infobox(t *testing.T) { + sid := NewSceneID() + i := &InitializerInfobox{ + PropertyID: NewPropertyID().Ref(), + Fields: []*InitializerInfoboxField{{ + ID: NewInfoboxFieldID().Ref(), + Plugin: MustPluginID("reearth"), + Extension: PluginExtensionID("marker"), + PropertyID: NewPropertyID().Ref(), + }}, + } + + expected := NewInfobox([]*InfoboxField{ + NewInfoboxField(). + ID(*i.Fields[0].ID). 
+ Plugin(i.Fields[0].Plugin). + Extension(i.Fields[0].Extension). + Property(*i.Fields[0].PropertyID). + MustBuild(), + }, *i.PropertyID) + actual, _, err := i.Infobox(sid) + + assert.NoError(t, err) + assert.Equal(t, expected, actual) +} + +func TestInitializerInfoboxField_Clone(t *testing.T) { + i := &InitializerInfoboxField{ + ID: NewInfoboxFieldID().Ref(), + Plugin: MustPluginID("reearth"), + Extension: PluginExtensionID("marker"), + PropertyID: NewPropertyID().Ref(), + Property: &property.Initializer{ + ID: NewPropertyID().Ref(), + }, + } + + actual := i.Clone() + + assert.NotSame(t, i, actual) + assert.NotSame(t, i.Property, actual.Property) + assert.NotSame(t, i.ID, actual.ID) + assert.Equal(t, i, actual) +} + +func TestInitializerInfoboxField_InfoboxField(t *testing.T) { + sid := NewSceneID() + i := &InitializerInfoboxField{ + ID: NewInfoboxFieldID().Ref(), + Plugin: MustPluginID("reearth"), + Extension: PluginExtensionID("marker"), + PropertyID: NewPropertyID().Ref(), + } + + expected := NewInfoboxField().ID(*i.ID).Plugin(i.Plugin).Extension(i.Extension).Property(*i.PropertyID).MustBuild() + actual, _, err := i.InfoboxField(sid) + + assert.NoError(t, err) + assert.Equal(t, expected, actual) + + // check if a new id is generated + i.ID = nil + actual, _, err = i.InfoboxField(sid) + assert.NoError(t, err) + assert.False(t, actual.ID().IsEmpty()) +} diff --git a/server/pkg/layer/item.go b/server/pkg/layer/item.go new file mode 100644 index 000000000..971d7ddf5 --- /dev/null +++ b/server/pkg/layer/item.go @@ -0,0 +1,156 @@ +package layer + +import ( + "github.com/reearth/reearth-backend/pkg/property" +) + +type Item struct { + layerBase + linkedDataset *DatasetID +} + +func (l *Item) ID() ID { + return l.layerBase.ID() +} + +func (l *Item) IDRef() *ID { + if l == nil { + return nil + } + return l.layerBase.IDRef() +} + +func (l *Item) Name() string { + if l == nil { + return "" + } + return l.layerBase.Name() +} + +func (l *Item) IsVisible() bool { + if l == 
nil { + return false + } + return l.layerBase.IsVisible() +} + +func (l *Item) Plugin() *PluginID { + if l == nil { + return nil + } + return l.layerBase.Plugin() +} + +func (l *Item) Extension() *PluginExtensionID { + if l == nil { + return nil + } + return l.layerBase.Extension() +} + +func (l *Item) UsesPlugin() bool { + if l == nil { + return false + } + return l.layerBase.UsesPlugin() +} + +func (l *Item) Property() *PropertyID { + if l == nil { + return nil + } + return l.layerBase.Property() +} + +func (l *Item) Infobox() *Infobox { + if l == nil { + return nil + } + return l.layerBase.Infobox() +} + +func (l *Item) Rename(name string) { + if l == nil { + return + } + l.layerBase.Rename(name) +} + +func (l *Item) SetVisible(visible bool) { + if l == nil { + return + } + l.layerBase.SetVisible(visible) +} + +func (l *Item) SetInfobox(infobox *Infobox) { + if l == nil { + return + } + l.layerBase.SetInfobox(infobox) +} + +func (l *Item) SetPlugin(plugin *PluginID) { + if l == nil { + return + } + l.layerBase.SetPlugin(plugin) +} + +func (l *Item) IsLinked() bool { + if l == nil { + return false + } + return l.linkedDataset != nil +} + +func (l *Item) LinkedDataset() *DatasetID { + if l == nil { + return nil + } + return l.linkedDataset.CopyRef() +} + +func (l *Item) Link(ds DatasetID) { + if l == nil { + return + } + ds2 := ds + l.linkedDataset = &ds2 +} + +func (l *Item) Unlink() { + if l == nil { + return + } + l.linkedDataset = nil +} + +func (l *Item) LayerRef() *Layer { + if l == nil { + return nil + } + var layer Layer = l + return &layer +} + +func (l *Item) Properties() []PropertyID { + if l == nil { + return nil + } + return l.layerBase.Properties() +} + +func (l *Item) ValidateProperties(pm property.Map) error { + if l == nil { + return nil + } + return l.layerBase.ValidateProperties(pm) +} + +func (l *Item) Tags() *TagList { + if l.layerBase.tags == nil { + l.layerBase.tags = NewTagList(nil) + } + return l.layerBase.tags +} diff --git 
a/server/pkg/layer/item_builder.go b/server/pkg/layer/item_builder.go new file mode 100644 index 000000000..f03ef5f80 --- /dev/null +++ b/server/pkg/layer/item_builder.go @@ -0,0 +1,103 @@ +package layer + +func ItemFromLayer(l Layer) *Item { + li, ok := l.(*Item) + if !ok { + return nil + } + return li +} + +func ItemFromLayerRef(l *Layer) *Item { + if l == nil { + return nil + } + li, ok := (*l).(*Item) + if !ok { + return nil + } + return li +} + +type ItemBuilder struct { + l *Item +} + +func NewItem() *ItemBuilder { + return &ItemBuilder{l: &Item{layerBase: layerBase{visible: true}}} +} + +func (b *ItemBuilder) Build() (*Item, error) { + if b.l.id.IsNil() { + return nil, ErrInvalidID + } + return b.l, nil +} + +func (b *ItemBuilder) MustBuild() *Item { + item, err := b.Build() + if err != nil { + panic(err) + } + return item +} + +func (b *ItemBuilder) base(layer layerBase) *ItemBuilder { + b.l.layerBase = layer + return b +} + +func (b *ItemBuilder) ID(id ID) *ItemBuilder { + b.l.id = id + return b +} + +func (b *ItemBuilder) NewID() *ItemBuilder { + b.l.id = NewID() + return b +} + +func (b *ItemBuilder) Scene(s SceneID) *ItemBuilder { + b.l.scene = s + return b +} + +func (b *ItemBuilder) Name(name string) *ItemBuilder { + b.l.name = name + return b +} + +func (b *ItemBuilder) IsVisible(visible bool) *ItemBuilder { + b.l.visible = visible + return b +} + +func (b *ItemBuilder) Plugin(plugin *PluginID) *ItemBuilder { + b.l.plugin = plugin.CopyRef() + return b +} + +func (b *ItemBuilder) Extension(extension *PluginExtensionID) *ItemBuilder { + b.l.extension = extension.CloneRef() + return b +} + +func (b *ItemBuilder) Property(p *PropertyID) *ItemBuilder { + b.l.property = p.CopyRef() + return b +} + +func (b *ItemBuilder) Infobox(infobox *Infobox) *ItemBuilder { + b.l.infobox = infobox + return b +} + +func (b *ItemBuilder) LinkedDataset(linkedDataset *DatasetID) *ItemBuilder { + b.l.linkedDataset = linkedDataset.CopyRef() + return b +} + +func (b 
*ItemBuilder) Tags(tags *TagList) *ItemBuilder { + b.l.tags = tags + return b +} diff --git a/server/pkg/layer/item_builder_test.go b/server/pkg/layer/item_builder_test.go new file mode 100644 index 000000000..4d83b01d2 --- /dev/null +++ b/server/pkg/layer/item_builder_test.go @@ -0,0 +1,13 @@ +package layer + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestItemBuilder_Tags(t *testing.T) { + l := NewTagList(nil) + b := NewItem().NewID().Tags(l).MustBuild() + assert.Same(t, l, b.Tags()) +} diff --git a/server/pkg/layer/item_test.go b/server/pkg/layer/item_test.go new file mode 100644 index 000000000..a803e2dec --- /dev/null +++ b/server/pkg/layer/item_test.go @@ -0,0 +1,3 @@ +package layer + +var _ Layer = &Item{} diff --git a/server/pkg/layer/layer.go b/server/pkg/layer/layer.go new file mode 100644 index 000000000..6f00affee --- /dev/null +++ b/server/pkg/layer/layer.go @@ -0,0 +1,230 @@ +package layer + +import ( + "errors" + "fmt" + + "github.com/reearth/reearth-backend/pkg/property" +) + +var ( + ErrDuplicatedTag = errors.New("duplicated tag") + ErrTagNotFound = errors.New("tag not found") +) + +type Layer interface { + ID() ID + Name() string + IsVisible() bool + Plugin() *PluginID + Extension() *PluginExtensionID + UsesPlugin() bool + Property() *PropertyID + HasInfobox() bool + Infobox() *Infobox + Scene() SceneID + Tags() *TagList + Rename(string) + SetVisible(bool) + SetInfobox(*Infobox) + SetPlugin(*PluginID) + Properties() []PropertyID + ValidateProperties(property.Map) error +} + +func ToLayerGroup(l Layer) *Group { + if lg, ok := l.(*Group); ok { + return lg + } + return nil +} + +func ToLayerGroupRef(l *Layer) *Group { + if l == nil { + return nil + } + l2 := *l + if lg, ok := l2.(*Group); ok { + return lg + } + return nil +} + +func ToLayerItem(l Layer) *Item { + if li, ok := l.(*Item); ok { + return li + } + return nil +} + +func ToLayerItemRef(l *Layer) *Item { + if l == nil { + return nil + } + l2 := *l + if li, ok := 
l2.(*Item); ok { + return li + } + return nil +} + +type layerBase struct { + id ID + name string + visible bool + plugin *PluginID + extension *PluginExtensionID + property *PropertyID + infobox *Infobox + scene SceneID + tags *TagList +} + +func (l *layerBase) ID() ID { + return l.id +} + +func (l *layerBase) IDRef() *ID { + if l == nil { + return nil + } + return l.id.Ref() +} + +func (l *layerBase) Name() string { + if l == nil { + return "" + } + return l.name +} + +func (l *layerBase) IsVisible() bool { + if l == nil { + return false + } + return l.visible +} + +func (l *layerBase) UsesPlugin() bool { + if l == nil { + return false + } + return l.plugin != nil && l.extension != nil +} + +func (l *layerBase) Plugin() *PluginID { + if l == nil { + return nil + } + return l.plugin.CopyRef() +} + +func (l *layerBase) Extension() *PluginExtensionID { + if l == nil { + return nil + } + return l.extension.CloneRef() +} + +func (l *layerBase) Property() *PropertyID { + if l == nil { + return nil + } + return l.property.CopyRef() +} + +func (l *layerBase) HasInfobox() bool { + if l == nil { + return false + } + return l.infobox != nil +} + +func (l *layerBase) Infobox() *Infobox { + if l == nil { + return nil + } + return l.infobox +} + +func (l *layerBase) Scene() SceneID { + return l.scene +} + +func (l *layerBase) Rename(name string) { + if l == nil { + return + } + l.name = name +} + +func (l *layerBase) SetVisible(visible bool) { + if l == nil { + return + } + l.visible = visible +} + +func (l *layerBase) SetInfobox(infobox *Infobox) { + if l == nil { + return + } + l.infobox = infobox +} + +func (l *layerBase) SetPlugin(plugin *PluginID) { + if l == nil { + return + } + l.plugin = plugin.CopyRef() +} + +func (l *layerBase) Properties() []PropertyID { + if l == nil { + return nil + } + res := []PropertyID{} + if l.property != nil { + res = append(res, *l.property) + } + if l.infobox != nil { + res = append(res, l.infobox.property) + for _, f := range 
l.infobox.fields { + res = append(res, f.property) + } + } + return res +} + +func (l *layerBase) ValidateProperties(pm property.Map) error { + if l == nil || pm == nil { + return nil + } + + // property + if l.property != nil { + if l.plugin == nil || l.extension == nil { + return errors.New("layer should have plugin id and extension id") + } + + psid := NewPropertySchemaID(*l.plugin, l.extension.String()) + + lp := pm[*l.property] + if lp == nil { + return errors.New("layer property does not exist") + } + + if !lp.Schema().Equal(psid) { + return errors.New("layer property has a invalid schema") + } + } else if l.plugin != nil || l.extension != nil { + return errors.New("layer should have property id") + } + + // infobox + if err := l.infobox.ValidateProperties(pm); err != nil { + return fmt.Errorf("infobox: %w", err) + } + + return nil +} diff --git a/server/pkg/layer/layer_test.go b/server/pkg/layer/layer_test.go new file mode 100644 index 000000000..3c208be26 --- /dev/null +++ b/server/pkg/layer/layer_test.go @@ -0,0 +1,4 @@ +package layer + +var _ Layer = &Item{} +var _ Layer = &Group{} diff --git a/server/pkg/layer/layerops/initializer.go b/server/pkg/layer/layerops/initializer.go new file mode 100644 index 000000000..c19433802 --- /dev/null +++ b/server/pkg/layer/layerops/initializer.go @@ -0,0 +1,63 @@ +package layerops + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" +) + +type LayerItem struct { + SceneID layer.SceneID + ParentLayerID layer.ID + Plugin *plugin.Plugin + ExtensionID *layer.PluginExtensionID + Index *int + LinkedDatasetID *layer.DatasetID + Name string + LinkablePropertySchema *property.Schema + LatLng *property.LatLng +} + +var ( + ErrExtensionTypeMustBePrimitive error = errors.New("extension type must be primitive") +) + +func (i LayerItem) Initialize() (*layer.Item, *property.Property, error) { + builder := 
layer.NewItem().NewID().Scene(i.SceneID) + + var p *property.Property + var err error + if i.Plugin != nil && i.ExtensionID != nil { + extension := i.Plugin.Extension(*i.ExtensionID) + if extension == nil || extension.Type() != plugin.ExtensionTypePrimitive { + return nil, nil, ErrExtensionTypeMustBePrimitive + } + + p, err = property.New(). + NewID(). + Schema(extension.Schema()). + Scene(i.SceneID). + Build() + + if err != nil { + return nil, nil, err + } + + p.UpdateLinkableValue(i.LinkablePropertySchema, property.ValueTypeLatLng.ValueFrom(i.LatLng)) + + builder. + Plugin(i.Plugin.ID().Ref()). + Extension(i.ExtensionID). + Property(p.ID().Ref()). + Name(i.Name) + } + + layerItem, err := builder.LinkedDataset(i.LinkedDatasetID).Build() + if err != nil { + return nil, nil, err + } + + return layerItem, p, nil +} diff --git a/server/pkg/layer/layerops/initializer_test.go b/server/pkg/layer/layerops/initializer_test.go new file mode 100644 index 000000000..f62399e67 --- /dev/null +++ b/server/pkg/layer/layerops/initializer_test.go @@ -0,0 +1,83 @@ +package layerops + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/stretchr/testify/assert" +) + +func TestInitialize(t *testing.T) { + lid := layer.NewID() + ps := plugin.MustPropertySchemaID("xxx~1.1.1/aa") + eid := plugin.ExtensionID("foo") + eid2 := plugin.ExtensionID("foo2") + e := plugin.NewExtension(). + ID("foo"). + Description(i18n.StringFrom("foo/des")). + Name(i18n.StringFrom("foo/name")). + Schema(ps). + Type(plugin.ExtensionTypePrimitive). + MustBuild() + e2 := plugin.NewExtension(). + ID("foo2"). + Type("not primitive"). + MustBuild() + es := append(make([]*plugin.Extension, 0), e) + es = append(es, e2) + p := plugin.New(). + ID(layer.MustPluginID("xxx~1.1.1")). + Schema(&ps). + Extensions(es). 
+ MustBuild() + s := layer.NewSceneID() + + tests := []struct { + name string + sceneID *layer.SceneID + parentLayerID *layer.ID + plugin *plugin.Plugin + extID *layer.PluginExtensionID + err error + }{ + { + name: "Success", + sceneID: &s, + parentLayerID: &lid, + plugin: p, + extID: &eid, + err: nil, + }, + { + name: "extension type error", + sceneID: &s, + parentLayerID: &lid, + plugin: p, + extID: &eid2, + err: ErrExtensionTypeMustBePrimitive, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + layerItem, property, err := LayerItem{ + SceneID: *tt.sceneID, + ParentLayerID: *tt.parentLayerID, + Plugin: tt.plugin, + ExtensionID: tt.extID, + Name: tt.name, + }.Initialize() + if tt.err == nil { + assert.NoError(t, err) + assert.NotNil(t, layerItem) + assert.NotNil(t, property) + } else { + assert.Equal(t, tt.err, err) + } + }) + } +} diff --git a/server/pkg/layer/layerops/processor.go b/server/pkg/layer/layerops/processor.go new file mode 100644 index 000000000..46b7da580 --- /dev/null +++ b/server/pkg/layer/layerops/processor.go @@ -0,0 +1,30 @@ +package layerops + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/layer" +) + +type Processor struct { + RootLayerID layer.ID + LayerLoader layer.Loader +} + +type UninstallPluginResult struct { + ModifiedLayers layer.List + RemovedProperties []layer.PropertyID +} + +func (p Processor) UninstallPlugin(ctx context.Context, pluginID layer.PluginID) (res UninstallPluginResult, err error) { + err = p.LayerLoader.Walk(ctx, func(l layer.Layer, parents layer.GroupList) error { + // delete infobox fields + if removedProperties := l.Infobox().RemoveAllByPlugin(pluginID, nil); len(removedProperties) > 0 { + res.RemovedProperties = append(res.RemovedProperties, removedProperties...) 
+ res.ModifiedLayers = append(res.ModifiedLayers, &l) + } + return nil + }, []layer.ID{p.RootLayerID}) + + return +} diff --git a/server/pkg/layer/layerops/processor_test.go b/server/pkg/layer/layerops/processor_test.go new file mode 100644 index 000000000..c9e499e08 --- /dev/null +++ b/server/pkg/layer/layerops/processor_test.go @@ -0,0 +1,36 @@ +package layerops + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/stretchr/testify/assert" +) + +func TestProcessor_UninstallPlugin(t *testing.T) { + sid := layer.NewSceneID() + pid := layer.MustPluginID("hoge~1.0.0") + pid2 := layer.MustPluginID("hoge~1.0.1") + ibf1 := layer.NewInfoboxField().NewID().Plugin(pid).Extension("a").Property(layer.NewPropertyID()).MustBuild() + ibf2 := layer.NewInfoboxField().NewID().Plugin(pid2).Extension("a").Property(layer.NewPropertyID()).MustBuild() + ib := layer.NewInfobox([]*layer.InfoboxField{ibf1, ibf2}, layer.NewPropertyID()) + l1 := layer.NewItem().NewID().Scene(sid).Property(layer.NewPropertyID().Ref()).Plugin(&id.OfficialPluginID).MustBuild() + l2 := layer.NewItem().NewID().Scene(sid).Property(layer.NewPropertyID().Ref()).Plugin(&id.OfficialPluginID).MustBuild() + l3 := layer.NewItem().NewID().Scene(sid).Property(layer.NewPropertyID().Ref()).Plugin(&id.OfficialPluginID).Infobox(ib).MustBuild() + l4 := layer.NewGroup().NewID().Scene(sid).Property(layer.NewPropertyID().Ref()).Layers(layer.NewIDList([]layer.ID{l1.ID(), l2.ID()})).MustBuild() + l5 := layer.NewGroup().NewID().Scene(sid).Layers(layer.NewIDList([]layer.ID{l3.ID(), l4.ID()})).MustBuild() + + res, err := Processor{ + LayerLoader: layer.LoaderFrom([]layer.Layer{l1, l2, l3, l4, l5}), + RootLayerID: l5.ID(), + }.UninstallPlugin(context.TODO(), pid) + + assert.NoError(t, err) + assert.Equal(t, UninstallPluginResult{ + ModifiedLayers: layer.List{l3.LayerRef()}, + RemovedProperties: []layer.PropertyID{ibf1.Property()}, + }, res) + 
assert.Equal(t, []*layer.InfoboxField{ibf2}, ib.Fields()) +} diff --git a/server/pkg/layer/list.go b/server/pkg/layer/list.go new file mode 100644 index 000000000..c43ce677d --- /dev/null +++ b/server/pkg/layer/list.go @@ -0,0 +1,305 @@ +package layer + +type List []*Layer + +func (ll List) Last() *Layer { + if len(ll) == 0 { + return nil + } + return ll[len(ll)-1] +} + +func (ll List) IDs() *IDList { + if len(ll) == 0 { + return nil + } + ids := make([]ID, 0, len(ll)) + for _, l := range ll.Deref() { + ids = append(ids, l.ID()) + } + return NewIDList(ids) +} + +func (ll List) Properties() []PropertyID { + if len(ll) == 0 { + return nil + } + ids := make([]PropertyID, 0, len(ll)) + for _, l := range ll.Deref() { + ids = append(ids, l.Properties()...) + } + return ids +} + +func (ll List) Pick(il *IDList) List { + if il == nil { + return nil + } + + layers := make(List, 0, il.LayerCount()) + for _, lid := range il.Layers() { + if l := ll.Find(lid); l != nil { + layers = append(layers, l) + } + } + return layers +} + +func (ll List) Find(lid ID) *Layer { + for _, l := range ll { + if l == nil { + continue + } + if (*l).ID() == lid { + return l + } + } + return nil +} + +func (ll List) FindByDataset(ds DatasetID) *Item { + for _, l := range ll { + if li := ItemFromLayerRef(l); li != nil { + dsid := li.LinkedDataset() + if dsid != nil && *dsid == ds { + return li + } + } + } + return nil +} + +func (ll List) ToLayerItemList() ItemList { + res := make(ItemList, 0, len(ll)) + for _, l := range ll { + if li := ItemFromLayerRef(l); li != nil { + res = append(res, li) + } + } + return res +} + +func (ll List) ToLayerGroupList() GroupList { + res := make(GroupList, 0, len(ll)) + for _, l := range ll { + if lg := GroupFromLayerRef(l); lg != nil { + res = append(res, lg) + } + } + return res +} + +func (ll List) SeparateLayerItemAndGroup() (ItemList, GroupList) { + resi := make(ItemList, 0, len(ll)) + resg := make(GroupList, 0, len(ll)) + for _, l := range ll { + if lg := 
GroupFromLayerRef(l); lg != nil { + resg = append(resg, lg) + } else if li := ItemFromLayerRef(l); li != nil { + resi = append(resi, li) + } + } + return resi, resg +} + +func (ll List) Deref() []Layer { + if ll == nil { + return nil + } + res := make([]Layer, 0, len(ll)) + for _, l := range ll { + if l != nil { + res = append(res, *l) + } else { + res = append(res, nil) + } + } + return res +} + +func (ll List) Loader() Loader { + return LoaderFrom(ll.Deref()) +} + +func (ll List) Map() Map { + m := make(Map, len(ll)) + m.Add(ll...) + return m +} + +func (ll List) Remove(lids ...ID) List { + if ll == nil { + return nil + } + + res := make(List, 0, len(ll)) + + for _, l := range ll { + if l == nil { + continue + } + hit := false + for _, lid := range lids { + if (*l).ID() == lid { + hit = true + break + } + } + if !hit { + res = append(res, l) + } + } + + return res +} + +type ItemList []*Item + +func (ll ItemList) FindByDataset(ds DatasetID) *Item { + for _, li := range ll { + dsid := li.LinkedDataset() + if dsid != nil && *dsid == ds { + return li + } + } + return nil +} + +func (ll ItemList) ToLayerList() List { + res := make(List, 0, len(ll)) + for _, l := range ll { + var layer Layer = l + res = append(res, &layer) + } + return res +} + +func (ll ItemList) Last() *Item { + if len(ll) == 0 { + return nil + } + return ll[len(ll)-1] +} + +type GroupList []*Group + +func (ll GroupList) ToLayerList() List { + res := make(List, 0, len(ll)) + for _, l := range ll { + var layer Layer = l + res = append(res, &layer) + } + return res +} + +func (ll GroupList) Last() *Group { + if len(ll) == 0 { + return nil + } + return ll[len(ll)-1] +} + +type Map map[ID]*Layer + +func MapFrom(l Layer) Map { + return List{&l}.Map() +} + +func (m Map) Add(layers ...*Layer) Map { + if m == nil { + m = map[ID]*Layer{} + } + for _, l := range layers { + if l == nil { + continue + } + l2 := *l + if l2 == nil { + continue + } + m[l2.ID()] = l + } + return m +} + +func (m Map) List() List { + 
if m == nil { + return nil + } + list := make(List, 0, len(m)) + for _, l := range m { + list = append(list, l) + } + return list +} + +func (m Map) Clone() Map { + if m == nil { + return Map{} + } + m2 := make(Map, len(m)) + for k, v := range m { + m2[k] = v + } + return m2 +} + +func (m Map) Merge(m2 Map) Map { + if m == nil { + return m2.Clone() + } + m3 := m.Clone() + if m2 == nil { + return m3 + } + + return m3.Add(m2.List()...) +} + +func (m Map) Pick(il *IDList) List { + if il == nil { + return nil + } + + layers := make(List, 0, il.LayerCount()) + for _, lid := range il.Layers() { + if l := m[lid]; l != nil { + layers = append(layers, l) + } + } + return layers +} + +func (m Map) Layer(i ID) Layer { + if l := m[i]; l != nil { + return *l + } + return nil +} + +func (m Map) Item(i ID) *Item { + if l := ToLayerItem(m.Layer(i)); l != nil { + return l + } + return nil +} + +func (m Map) Group(i ID) *Group { + if l := ToLayerGroup(m.Layer(i)); l != nil { + return l + } + return nil +} + +func (m Map) Keys() []ID { + keys := make([]ID, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + sortIDs(keys) + return keys +} + +func (m Map) Len() int { + return len(m) +} diff --git a/server/pkg/layer/list_test.go b/server/pkg/layer/list_test.go new file mode 100644 index 000000000..01dc68772 --- /dev/null +++ b/server/pkg/layer/list_test.go @@ -0,0 +1,85 @@ +package layer + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestList_IDs(t *testing.T) { + sid := NewSceneID() + l1 := NewID() + l2 := NewID() + + tests := []struct { + name string + target List + want *IDList + }{ + { + name: "ok", + target: List{ + New().ID(l1).Scene(sid).Item().MustBuild().LayerRef(), + New().ID(l2).Scene(sid).Group().MustBuild().LayerRef(), + }, + want: NewIDList([]ID{l1, l2}), + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.IDs()) + }) + 
} +} + +func TestList_Properties(t *testing.T) { + sid := NewSceneID() + p1 := NewPropertyID() + p2 := NewPropertyID() + p3 := NewPropertyID() + + tests := []struct { + name string + target List + want []PropertyID + }{ + { + name: "ok", + target: List{ + New().NewID().Scene(sid).Property(&p1).Item().MustBuild().LayerRef(), + New().NewID().Scene(sid).Infobox(NewInfobox([]*InfoboxField{ + {property: p3}, + }, p2)).Group().MustBuild().LayerRef(), + }, + want: []PropertyID{p1, p2, p3}, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.Properties()) + }) + } +} + +func TestList_Remove(t *testing.T) { + sid := NewSceneID() + l1 := NewItem().NewID().Scene(sid).MustBuild() + l2 := NewItem().NewID().Scene(sid).MustBuild() + l3 := NewItem().NewID().Scene(sid).MustBuild() + assert.Equal(t, List{l2.LayerRef()}, List{l1.LayerRef(), l2.LayerRef()}.Remove(l1.ID(), l3.ID())) + assert.Equal(t, List{l1.LayerRef(), l2.LayerRef()}, List{l1.LayerRef(), l2.LayerRef()}.Remove()) + assert.Equal(t, List(nil), List(nil).Remove(l1.ID())) + assert.Equal(t, List{}, List{}.Remove(l1.ID())) +} diff --git a/server/pkg/layer/loader.go b/server/pkg/layer/loader.go new file mode 100644 index 000000000..ef4e597f6 --- /dev/null +++ b/server/pkg/layer/loader.go @@ -0,0 +1,72 @@ +package layer + +import ( + "context" + "errors" +) + +type Loader func(context.Context, ...ID) (List, error) +type LoaderByScene func(context.Context, SceneID) (List, error) + +var WalkerSkipChildren = errors.New("LAYER_WALKER_SKIP_CHILDREN") + +func LoaderFrom(data []Layer) Loader { + return func(ctx context.Context, ids ...ID) (List, error) { + res := make([]*Layer, 0, len(ids)) + for _, i := range ids { + found := false + for _, d := range data { + if i == d.ID() { + res = append(res, &d) + found = true + break + } + } + if !found { + res = append(res, nil) + } + } + return res, nil + } +} + +func 
LoaderFromMap(data map[ID]Layer) Loader { + return func(ctx context.Context, ids ...ID) (List, error) { + res := make([]*Layer, 0, len(ids)) + for _, i := range ids { + if d, ok := data[i]; ok { + res = append(res, &d) + } else { + res = append(res, nil) + } + } + return res, nil + } +} + +func (l Loader) Walk(ctx context.Context, walker func(Layer, GroupList) error, init []ID) error { + var walk func(ids []ID, parents GroupList) error + walk = func(ids []ID, parents GroupList) error { + loaded, err := l(ctx, ids...) + if err != nil { + return err + } + for _, l := range loaded.Deref() { + if l == nil { + continue + } + if err := walker(l, parents); err == WalkerSkipChildren { + continue + } else if err != nil { + return err + } + if lg := ToLayerGroup(l); lg != nil && lg.Layers().LayerCount() > 0 { + if err := walk(lg.Layers().Layers(), append(parents, lg)); err != nil { + return err + } + } + } + return nil + } + return walk(init, nil) +} diff --git a/server/pkg/layer/loader_test.go b/server/pkg/layer/loader_test.go new file mode 100644 index 000000000..f5a5ad4d4 --- /dev/null +++ b/server/pkg/layer/loader_test.go @@ -0,0 +1,79 @@ +package layer + +import ( + "context" + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestLoader_Walk(t *testing.T) { + sid := NewSceneID() + l1 := NewItem().NewID().Scene(sid).MustBuild() + l2 := NewItem().NewID().Scene(sid).MustBuild() + l3 := NewItem().NewID().Scene(sid).MustBuild() + l4 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]ID{l1.ID(), l2.ID()})).MustBuild() + l5 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]ID{l3.ID(), l4.ID()})).MustBuild() + w := LoaderFrom([]Layer{l1, l2, l3, l4, l5}) + + layers := []Layer{} + parents := []GroupList{} + err := w.Walk(context.TODO(), func(l Layer, p GroupList) error { + layers = append(layers, l) + parents = append(parents, p) + return nil + }, []ID{l5.ID()}) + + assert.NoError(t, err) + assert.Equal(t, []Layer{l5, l3, l4, l1, l2}, layers) + 
assert.Equal(t, []GroupList{nil, {l5}, {l5}, {l5, l4}, {l5, l4}}, parents) +} + +func TestLoader_Walk2(t *testing.T) { + sid := NewSceneID() + l1 := NewItem().NewID().Scene(sid).MustBuild() + l2 := NewItem().NewID().Scene(sid).MustBuild() + l3 := NewItem().NewID().Scene(sid).MustBuild() + l4 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]ID{l1.ID(), l2.ID()})).MustBuild() + l5 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]ID{l3.ID(), l4.ID()})).MustBuild() + w := LoaderFrom([]Layer{l1, l2, l3, l4, l5}) + + layers := []Layer{} + parents := []GroupList{} + err := w.Walk(context.TODO(), func(l Layer, p GroupList) error { + layers = append(layers, l) + parents = append(parents, p) + return WalkerSkipChildren + }, []ID{l5.ID()}) + + assert.NoError(t, err) + assert.Equal(t, []Layer{l5}, layers) + assert.Equal(t, []GroupList{nil}, parents) +} + +func TestLoader_Walk3(t *testing.T) { + sid := NewSceneID() + l1 := NewItem().NewID().Scene(sid).MustBuild() + l2 := NewItem().NewID().Scene(sid).MustBuild() + l3 := NewItem().NewID().Scene(sid).MustBuild() + l4 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]ID{l1.ID(), l2.ID()})).MustBuild() + l5 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]ID{l3.ID(), l4.ID()})).MustBuild() + w := LoaderFrom([]Layer{l1, l2, l3, l4, l5}) + + err := errors.New("Error") + layers := []Layer{} + parents := []GroupList{} + err2 := w.Walk(context.TODO(), func(l Layer, p GroupList) error { + layers = append(layers, l) + parents = append(parents, p) + if l == l4 { + return err + } + return nil + }, []ID{l5.ID()}) + + assert.Same(t, err, err2) + assert.Equal(t, []Layer{l5, l3, l4}, layers) + assert.Equal(t, []GroupList{nil, {l5}, {l5}}, parents) +} diff --git a/server/pkg/layer/merged.go b/server/pkg/layer/merged.go new file mode 100644 index 000000000..6ac4217d5 --- /dev/null +++ b/server/pkg/layer/merged.go @@ -0,0 +1,208 @@ +package layer + +import ( + "github.com/reearth/reearth-backend/pkg/property" +) + +// Merged represents a 
merged layer from two layers +type Merged struct { + Original ID + Parent *ID + Name string + Scene SceneID + Property *property.MergedMetadata + Infobox *MergedInfobox + PluginID *PluginID + ExtensionID *PluginExtensionID + IsVisible bool + Tags []MergedTag +} + +// MergedTag represents a merged tag from two layers +type MergedTag struct { + ID TagID + Tags []MergedTag +} + +// MergedInfobox represents a merged info box from two layers +type MergedInfobox struct { + Property *property.MergedMetadata + Fields []*MergedInfoboxField +} + +// MergedInfoboxField represents a field of MergedInfobox +type MergedInfoboxField struct { + ID InfoboxFieldID + Plugin PluginID + Extension PluginExtensionID + Property *property.MergedMetadata +} + +// Merge merges two layers +func Merge(o Layer, p *Group) *Merged { + if o == nil || p != nil && o.Scene() != p.Scene() { + return nil + } + + return &Merged{ + Original: o.ID(), + Parent: p.IDRef().CopyRef(), + Scene: o.Scene(), + Name: o.Name(), + PluginID: o.Plugin().CopyRef(), + ExtensionID: o.Extension().CloneRef(), + Property: &property.MergedMetadata{ + Original: o.Property(), + Parent: p.Property(), + LinkedDataset: ToLayerItem(o).LinkedDataset(), + }, + IsVisible: o.IsVisible(), + Tags: MergeTags(o.Tags(), p.Tags()), + Infobox: MergeInfobox(o.Infobox(), p.Infobox(), ToLayerItem(o).LinkedDataset()), + } +} + +// MergeInfobox merges two tag lists +func MergeTags(o, _p *TagList) []MergedTag { + // Currently parent tags are ignored + tags := o.Tags() + if len(tags) == 0 { + return nil + } + res := make([]MergedTag, 0, len(tags)) + for _, t := range tags { + tags := TagGroupFrom(t).Children() + + var tags2 []MergedTag + if len(tags) > 0 { + tags2 = make([]MergedTag, 0, len(tags)) + for _, t := range tags { + tags2 = append(tags2, MergedTag{ID: t.ID()}) + } + } + + res = append(res, MergedTag{ + ID: t.ID(), + Tags: tags2, + }) + } + return res +} + +// MergeInfobox merges two infoboxes +func MergeInfobox(o *Infobox, p *Infobox, 
linked *DatasetID) *MergedInfobox { + if o == nil && p == nil { + return nil + } + + var ibf []*InfoboxField + if o != nil { + ibf = o.Fields() + } else if p != nil { + ibf = p.Fields() + } + + fields := make([]*MergedInfoboxField, 0, len(ibf)) + for _, f := range ibf { + p := f.Property() + fields = append(fields, &MergedInfoboxField{ + ID: f.ID(), + Plugin: f.Plugin(), + Extension: f.Extension(), + Property: &property.MergedMetadata{ + Original: &p, + Parent: nil, + LinkedDataset: linked, + }, + }) + } + + return &MergedInfobox{ + Fields: fields, + Property: &property.MergedMetadata{ + Original: o.PropertyRef(), + Parent: p.PropertyRef(), + LinkedDataset: linked, + }, + } +} + +// Properties returns all property IDs in Merged +func (m *Merged) Properties() []PropertyID { + if m == nil { + return nil + } + added := map[PropertyID]struct{}{} + result := []PropertyID{} + if m.Property != nil { + if m.Property.Original != nil { + t := *m.Property.Original + if _, ok := added[t]; !ok { + result = append(result, t) + added[t] = struct{}{} + } + } + if m.Property.Parent != nil { + t := *m.Property.Parent + if _, ok := added[t]; !ok { + result = append(result, t) + added[t] = struct{}{} + } + } + } + if m.Infobox != nil && m.Infobox.Property != nil { + if m.Infobox.Property.Original != nil { + t := *m.Infobox.Property.Original + if _, ok := added[t]; !ok { + result = append(result, t) + added[t] = struct{}{} + } + } + if m.Infobox.Property.Parent != nil { + t := *m.Infobox.Property.Parent + if _, ok := added[t]; !ok { + result = append(result, t) + added[t] = struct{}{} + } + } + } + if m.Infobox != nil { + for _, f := range m.Infobox.Fields { + if f.Property.Original != nil { + t := *f.Property.Original + if _, ok := added[t]; !ok { + result = append(result, t) + added[t] = struct{}{} + } + } + if f.Property.Parent != nil { + t := *f.Property.Parent + if _, ok := added[t]; !ok { + result = append(result, t) + added[t] = struct{}{} + } + } + } + } + return result +} + 
+func (m *Merged) AllTags() (res []MergedTag) { + if m == nil { + return nil + } + for _, t := range m.Tags { + res = append(res, append([]MergedTag{t}, t.Tags...)...) + } + return res +} + +func (m *Merged) AllTagIDs() (res []TagID) { + if m == nil { + return nil + } + for _, t := range m.AllTags() { + res = append(res, t.ID) + } + return res +} diff --git a/server/pkg/layer/merged_test.go b/server/pkg/layer/merged_test.go new file mode 100644 index 000000000..e10ba5856 --- /dev/null +++ b/server/pkg/layer/merged_test.go @@ -0,0 +1,416 @@ +package layer + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +func TestMerge(t *testing.T) { + scene := NewSceneID() + dataset1 := NewDatasetID() + p := MustPluginID("xxx~1.1.1") + e := PluginExtensionID("foo") + + t1 := NewTagID() + t2 := NewTagID() + t3 := NewTagID() + itemProperty := NewPropertyID() + groupProperty := NewPropertyID() + ib1pr := NewPropertyID() + ib2pr := NewPropertyID() + f1pr := NewPropertyID() + f2pr := NewPropertyID() + f3pr := NewPropertyID() + + f1 := NewInfoboxField().NewID().Plugin(p).Extension(e).Property(f1pr).MustBuild() + f2 := NewInfoboxField().NewID().Plugin(p).Extension(e).Property(f2pr).MustBuild() + f3 := NewInfoboxField().NewID().Plugin(p).Extension(e).Property(f3pr).MustBuild() + + // no-infobox and no-linked + itemLayer1 := NewItem(). + NewID(). + Scene(scene). + Plugin(&p). + Extension(&e). + Property(&itemProperty). + IsVisible(false). + MustBuild() + // no-infobox + itemLayer2 := NewItem(). + NewID(). + Scene(scene). + Plugin(&p). + Extension(&e). + Property(&itemProperty). + LinkedDataset(&dataset1). + Tags(NewTagList([]Tag{NewTagGroup(t1, []*TagItem{NewTagItem(t2)}), NewTagItem(t3)})). + MustBuild() + // infobox + itemLayer3 := NewItem(). + NewID(). + Scene(scene). + Plugin(&p). + Extension(&e). + Property(&itemProperty). + LinkedDataset(&dataset1). + Infobox(NewInfobox([]*InfoboxField{f1, f3}, ib1pr)). 
+ MustBuild() + // infobox but field is empty + itemLayer4 := NewItem(). + NewID(). + Scene(scene). + Plugin(&p). + Extension(&e). + Property(&itemProperty). + LinkedDataset(&dataset1). + Infobox(NewInfobox(nil, ib1pr)). + MustBuild() + // no-infobox + groupLayer1 := NewGroup(). + NewID(). + Scene(scene). + Plugin(&p). + Extension(&e). + Property(&groupProperty). + Tags(NewTagList([]Tag{NewTagGroup(t1, []*TagItem{NewTagItem(t2)}), NewTagItem(t3)})). + MustBuild() + // infobox + groupLayer2 := NewGroup(). + NewID(). + Scene(scene). + Plugin(&p). + Extension(&e). + Property(&groupProperty). + Infobox(NewInfobox([]*InfoboxField{f2, f3}, ib2pr)). + MustBuild() + + tests := []struct { + name string + o Layer + p *Group + want *Merged + }{ + { + name: "nil", + o: nil, + p: nil, + want: nil, + }, + { + name: "parent only", + o: nil, + p: groupLayer1, + want: nil, + }, + { + name: "only original without infobox and link", + o: itemLayer1, + p: nil, + want: &Merged{ + Original: itemLayer1.ID(), + Parent: nil, + Scene: scene, + PluginID: &p, + ExtensionID: &e, + IsVisible: false, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: nil, + LinkedDataset: nil, + }, + }, + }, + { + name: "only original with infobox", + o: itemLayer3, + p: nil, + want: &Merged{ + Original: itemLayer3.ID(), + Parent: nil, + Scene: scene, + PluginID: &p, + ExtensionID: &e, + IsVisible: true, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: nil, + LinkedDataset: &dataset1, + }, + Infobox: &MergedInfobox{ + Property: &property.MergedMetadata{ + Original: &ib1pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + Fields: []*MergedInfoboxField{ + { + ID: f1.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f1pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + { + ID: f3.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f3pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, 
+ }, + }, + }, + }, + { + name: "original without infobox, parent without infobox", + o: itemLayer2, + p: groupLayer1, + want: &Merged{ + Original: itemLayer2.ID(), + Parent: groupLayer1.IDRef(), + Scene: scene, + PluginID: &p, + ExtensionID: &e, + IsVisible: true, + Tags: []MergedTag{ + {ID: t1, Tags: []MergedTag{{ID: t2}}}, + {ID: t3}, + }, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, + }, + }, + }, + { + name: "original with infobox, parent without infobox", + o: itemLayer3, + p: groupLayer1, + want: &Merged{ + Original: itemLayer3.ID(), + Parent: groupLayer1.IDRef(), + Scene: scene, + PluginID: &p, + ExtensionID: &e, + IsVisible: true, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, + }, + Infobox: &MergedInfobox{ + Property: &property.MergedMetadata{ + Original: &ib1pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + Fields: []*MergedInfoboxField{ + { + ID: f1.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f1pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + { + ID: f3.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f3pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + }, + }, + }, + }, + { + name: "original without infobox, parent with infobox", + o: itemLayer2, + p: groupLayer2, + want: &Merged{ + Original: itemLayer2.ID(), + Parent: groupLayer2.IDRef(), + Scene: scene, + PluginID: &p, + ExtensionID: &e, + IsVisible: true, + Tags: []MergedTag{ + {ID: t1, Tags: []MergedTag{{ID: t2}}}, + {ID: t3}, + }, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, + }, + Infobox: &MergedInfobox{ + Property: &property.MergedMetadata{ + Original: nil, + Parent: &ib2pr, + LinkedDataset: &dataset1, + }, + Fields: []*MergedInfoboxField{ + { + ID: f2.ID(), + Plugin: p, + 
Extension: e, + Property: &property.MergedMetadata{ + Original: &f2pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + { + ID: f3.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f3pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + }, + }, + }, + }, + { + name: "original with infobox, parent with infobox", + o: itemLayer3, + p: groupLayer2, + want: &Merged{ + Original: itemLayer3.ID(), + Parent: groupLayer2.IDRef(), + Scene: scene, + PluginID: &p, + ExtensionID: &e, + IsVisible: true, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, + }, + Infobox: &MergedInfobox{ + Property: &property.MergedMetadata{ + Original: &ib1pr, + Parent: &ib2pr, + LinkedDataset: &dataset1, + }, + Fields: []*MergedInfoboxField{ + { + ID: f1.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f1pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + { + ID: f3.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f3pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + }, + }, + }, + }, + { + name: "original with infobox but field is empty, parent with infobox", + o: itemLayer4, + p: groupLayer2, + want: &Merged{ + Original: itemLayer4.ID(), + Parent: groupLayer2.IDRef(), + Scene: scene, + PluginID: &p, + ExtensionID: &e, + IsVisible: true, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, + }, + Infobox: &MergedInfobox{ + Property: &property.MergedMetadata{ + Original: &ib1pr, + Parent: &ib2pr, + LinkedDataset: &dataset1, + }, + Fields: []*MergedInfoboxField{}, + }, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + actual := Merge(tt.o, tt.p) + assert.Equal(t, tt.want, actual) + }) + } +} + +func TestMergedProperties(t *testing.T) { + itemProperty := NewPropertyID() + 
groupProperty := NewPropertyID() + ib1pr := NewPropertyID() + ib2pr := NewPropertyID() + f1pr := NewPropertyID() + f2pr := NewPropertyID() + f3pr := NewPropertyID() + + merged := &Merged{ + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + }, + Infobox: &MergedInfobox{ + Property: &property.MergedMetadata{ + Original: &ib1pr, + Parent: &ib2pr, + }, + Fields: []*MergedInfoboxField{ + { + Property: &property.MergedMetadata{ + Original: &f1pr, + Parent: &f2pr, + }, + }, + { + Property: &property.MergedMetadata{ + Original: &f3pr, + Parent: nil, + }, + }, + }, + }, + } + + assert.Equal(t, []PropertyID{ + itemProperty, groupProperty, ib1pr, ib2pr, f1pr, f2pr, f3pr, + }, merged.Properties()) +} diff --git a/server/pkg/layer/merging/merged.go b/server/pkg/layer/merging/merged.go new file mode 100644 index 000000000..a6d274b05 --- /dev/null +++ b/server/pkg/layer/merging/merged.go @@ -0,0 +1,123 @@ +package merging + +import ( + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +var ( + _ MergedLayer = &MergedLayerGroup{} // must implement Layer + _ MergedLayer = &MergedLayerItem{} // must implement Layer +) + +type MergedLayer interface { + Common() *MergedLayerCommon + AllDatasets() layer.DatasetIDList + AllTags() layer.TagIDList +} + +type MergedLayerGroup struct { + MergedLayerCommon + Children []MergedLayer +} + +type MergedLayerItem struct { + MergedLayerCommon +} + +type MergedLayerCommon struct { + layer.Merged + Property *property.Merged + Infobox *MergedInfobox +} + +type MergedInfobox struct { + layer.MergedInfobox + Property *property.Merged + Fields []*MergedInfoboxField +} + +type MergedInfoboxField struct { + layer.MergedInfoboxField + Property *property.Merged +} + +func (l *MergedLayerGroup) Common() *MergedLayerCommon { + if l == nil { + return nil + } + return &l.MergedLayerCommon +} + +func (l *MergedLayerItem) Common() *MergedLayerCommon { + if l == nil { + 
return nil + } + return &l.MergedLayerCommon +} + +func (l *MergedLayerCommon) Datasets() layer.DatasetIDList { + return l.datasetIDSet().List() +} + +func (l *MergedLayerCommon) Tags() []layer.TagID { + return l.tagIDSet().List() +} + +func (l *MergedLayerCommon) datasetIDSet() *layer.DatasetIDSet { + if l == nil { + return nil + } + res := layer.NewDatasetIDSet() + res.Add(l.Property.Datasets()...) + res.Add(l.Infobox.Property.Datasets()...) + for _, f := range l.Infobox.Fields { + res.Add(f.Property.Datasets()...) + } + return res +} + +func (l *MergedLayerCommon) tagIDSet() *layer.TagIDSet { + if l == nil { + return nil + } + res := layer.NewTagIDSet() + res.Add(l.Merged.AllTagIDs()...) + return res +} + +func (l *MergedLayerItem) AllDatasets() layer.DatasetIDList { + if l == nil { + return nil + } + return l.Datasets() +} + +func (l *MergedLayerItem) AllTags() layer.TagIDList { + if l == nil { + return nil + } + return l.Tags() +} + +func (l *MergedLayerGroup) AllDatasets() layer.DatasetIDList { + if l == nil { + return nil + } + d := l.datasetIDSet() + for _, l := range l.Children { + d.Add(l.AllDatasets()...) + } + return d.List() +} + +func (l *MergedLayerGroup) AllTags() layer.TagIDList { + if l == nil { + return nil + } + d := l.tagIDSet() + for _, l := range l.Children { + d.Add(l.AllTags()...) 
+ } + return d.List() +} diff --git a/server/pkg/layer/merging/merger.go b/server/pkg/layer/merging/merger.go new file mode 100644 index 000000000..7c1153bfe --- /dev/null +++ b/server/pkg/layer/merging/merger.go @@ -0,0 +1,119 @@ +package merging + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Merger struct { + LayerLoader layer.Loader + PropertyLoader property.Loader +} + +func (m *Merger) MergeLayer(ctx context.Context, l layer.Layer, parent *layer.Group) (MergedLayer, error) { + if m == nil || l == nil { + return nil, nil + } + + common, err := m.mergeCommon(ctx, l, parent) + if err != nil { + return nil, err + } + if common == nil { + return nil, nil + } + + if li := layer.ToLayerItem(l); li != nil { + // item + return &MergedLayerItem{*common}, nil + } else if lg := layer.ToLayerGroup(l); lg != nil { + // group + layers, err := m.LayerLoader(ctx, lg.Layers().Layers()...) + if err != nil { + return nil, err + } + + children := make([]MergedLayer, 0, len(layers)) + for _, c := range layers { + if c == nil { + continue + } + ml, err := m.MergeLayer(ctx, *c, lg) + if err != nil { + return nil, err + } + children = append(children, ml) + } + + return &MergedLayerGroup{ + MergedLayerCommon: *common, + Children: children, + }, nil + } + + return nil, nil +} + +func (m *Merger) MergeLayerFromID(ctx context.Context, i layer.ID, parent *layer.Group) (MergedLayer, error) { + l, err := m.LayerLoader(ctx, i) + if err != nil { + return nil, err + } + if len(l) == 0 || l[0] == nil { + return nil, nil + } + return m.MergeLayer(ctx, *l[0], parent) +} + +func (m *Merger) mergeCommon(ctx context.Context, original layer.Layer, parent *layer.Group) (p *MergedLayerCommon, e error) { + ml := layer.Merge(original, parent) + if ml == nil { + return + } + properties, err := m.PropertyLoader(ctx, ml.Properties()...) 
+ if err != nil { + e = err + return + } + + var infobox *MergedInfobox + if ml.Infobox != nil { + fields := make([]*MergedInfoboxField, 0, len(ml.Infobox.Fields)) + for _, f := range ml.Infobox.Fields { + fields = append(fields, &MergedInfoboxField{ + MergedInfoboxField: *f, + Property: mergeProperty(f.Property, properties), + }) + } + infobox = &MergedInfobox{ + MergedInfobox: *ml.Infobox, + Fields: fields, + Property: mergeProperty(ml.Infobox.Property, properties), + } + } + + p = &MergedLayerCommon{ + Merged: *ml, + Property: mergeProperty(ml.Property, properties), + Infobox: infobox, + } + return +} + +func mergeProperty(ml *property.MergedMetadata, properties []*property.Property) *property.Merged { + var op, pp *property.Property + for _, p := range properties { + if ml.Original != nil && p.ID() == *ml.Original { + op = p + } + if ml.Parent != nil && p.ID() == *ml.Parent { + pp = p + } + if (ml.Original == nil || op != nil) && (ml.Parent == nil || pp != nil) { + break + } + } + return ml.Merge(op, pp) +} diff --git a/server/pkg/layer/merging/merger_test.go b/server/pkg/layer/merging/merger_test.go new file mode 100644 index 000000000..573e72fab --- /dev/null +++ b/server/pkg/layer/merging/merger_test.go @@ -0,0 +1,158 @@ +package merging + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +func TestMergeLayer(t *testing.T) { + // ids + scene := layer.NewSceneID() + dataset1 := layer.NewDatasetID() + ps := property.MustSchemaID("xxx~1.1.1/aa") + p := layer.MustPluginID("xxx~1.1.1") + e := layer.PluginExtensionID("foo") + itemProperty := property.NewID() + groupProperty := property.NewID() + ib1pr := property.NewID() + ib2pr := property.NewID() + fpr := property.NewID() + l1 := layer.NewID() + l2 := layer.NewID() + l1if1 := layer.NewInfoboxFieldID() + + // property loader + ploader := property.LoaderFrom([]*property.Property{ + 
property.New().ID(itemProperty).Scene(scene).Schema(ps).MustBuild(), + property.New().ID(groupProperty).Scene(scene).Schema(ps).MustBuild(), + property.New().ID(ib1pr).Scene(scene).Schema(ps).MustBuild(), + property.New().ID(ib2pr).Scene(scene).Schema(ps).MustBuild(), + property.New().ID(fpr).Scene(scene).Schema(ps).MustBuild(), + }) + + // layer loader + lloader := layer.LoaderFrom([]layer.Layer{ + layer.NewItem(). + ID(l1). + Scene(scene). + Property(&itemProperty). + LinkedDataset(&dataset1). + Infobox(layer.NewInfobox(nil, ib1pr)). + IsVisible(false). + MustBuild(), + layer.NewGroup(). + ID(l2). + Scene(scene). + Property(&groupProperty). + Infobox(layer.NewInfobox([]*layer.InfoboxField{ + layer.NewInfoboxField().ID(l1if1).Plugin(p).Extension(e).Property(fpr).MustBuild(), + }, ib2pr)). + Layers(layer.NewIDList([]layer.ID{l1})). + IsVisible(false). + MustBuild(), + }) + + expectedInfobox := layer.MergedInfobox{ + Property: &property.MergedMetadata{ + Original: &ib1pr, + Parent: &ib2pr, + LinkedDataset: &dataset1, + }, + Fields: []*layer.MergedInfoboxField{}, + } + expectedInfoboxField2 := layer.MergedInfoboxField{ + ID: l1if1, + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &fpr, + }, + } + expectedInfobox2 := layer.MergedInfobox{ + Property: &property.MergedMetadata{ + Original: &ib2pr, + }, + Fields: []*layer.MergedInfoboxField{&expectedInfoboxField2}, + } + + expected := &MergedLayerGroup{ + MergedLayerCommon: MergedLayerCommon{ + Merged: layer.Merged{ + Original: l2, + Scene: scene, + IsVisible: false, + Property: &property.MergedMetadata{ + Original: &groupProperty, + }, + Infobox: &expectedInfobox2, + }, + Infobox: &MergedInfobox{ + MergedInfobox: expectedInfobox2, + Property: &property.Merged{ + Original: &ib2pr, + Schema: ps, + }, + Fields: []*MergedInfoboxField{ + { + MergedInfoboxField: expectedInfoboxField2, + Property: &property.Merged{ + Original: &fpr, + Schema: ps, + }, + }, + }, + }, + Property: &property.Merged{ + 
Original: &groupProperty, + Schema: ps, + }, + }, + Children: []MergedLayer{ + &MergedLayerItem{ + MergedLayerCommon{ + Merged: layer.Merged{ + Original: l1, + Parent: &l2, + Scene: scene, + IsVisible: false, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, + }, + Infobox: &expectedInfobox, + }, + Infobox: &MergedInfobox{ + MergedInfobox: expectedInfobox, + Property: &property.Merged{ + Original: &ib1pr, + Parent: &ib2pr, + Schema: ps, + LinkedDataset: &dataset1, + }, + Fields: []*MergedInfoboxField{}, + }, + Property: &property.Merged{ + Original: &itemProperty, + Parent: &groupProperty, + Schema: ps, + LinkedDataset: &dataset1, + }, + }, + }, + }, + } + + merger := Merger{ + PropertyLoader: ploader, + LayerLoader: lloader, + } + actual, err := merger.MergeLayerFromID(context.Background(), l2, nil) + + assert.NoError(t, err) + assert.Equal(t, expected, actual) +} diff --git a/server/pkg/layer/merging/sealed.go b/server/pkg/layer/merging/sealed.go new file mode 100644 index 000000000..4a6dbf665 --- /dev/null +++ b/server/pkg/layer/merging/sealed.go @@ -0,0 +1,105 @@ +package merging + +import ( + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +var ( + _ SealedLayer = &SealedLayerGroup{} // must implement SealedLayer + _ SealedLayer = &SealedLayerItem{} // must implement SealedLayer +) + +type SealedLayer interface { + Common() *SealedLayerCommon + Flatten() []*SealedLayerItem + Group() *SealedLayerGroup + Item() *SealedLayerItem +} + +type SealedLayerGroup struct { + SealedLayerCommon + Children []SealedLayer +} + +type SealedLayerItem struct { + SealedLayerCommon +} + +type SealedLayerCommon struct { + layer.Merged + Property *property.Sealed + Infobox *SealedInfobox + Tags []SealedTag +} + +type SealedInfobox struct { + layer.MergedInfobox + Property *property.Sealed + Fields []*SealedInfoboxField +} + +type SealedInfoboxField struct { + 
layer.MergedInfoboxField + Property *property.Sealed +} + +type SealedTag struct { + ID layer.TagID + Label string + Tags []SealedTag +} + +func (l *SealedLayerGroup) Common() *SealedLayerCommon { + if l == nil { + return nil + } + return &l.SealedLayerCommon +} + +func (l *SealedLayerGroup) Flatten() []*SealedLayerItem { + if l == nil { + return nil + } + layers := []*SealedLayerItem{} + for _, c := range l.Children { + layers = append(layers, c.Flatten()...) + } + return layers +} + +func (l *SealedLayerGroup) Item() *SealedLayerItem { + return nil +} + +func (l *SealedLayerGroup) Group() *SealedLayerGroup { + if l == nil { + return nil + } + return l +} + +func (l *SealedLayerItem) Common() *SealedLayerCommon { + if l == nil { + return nil + } + return &l.SealedLayerCommon +} + +func (l *SealedLayerItem) Flatten() []*SealedLayerItem { + if l == nil { + return nil + } + return []*SealedLayerItem{l} +} + +func (l *SealedLayerItem) Item() *SealedLayerItem { + if l == nil { + return nil + } + return l +} + +func (*SealedLayerItem) Group() *SealedLayerGroup { + return nil +} diff --git a/server/pkg/layer/merging/sealer.go b/server/pkg/layer/merging/sealer.go new file mode 100644 index 000000000..d4a6bacf1 --- /dev/null +++ b/server/pkg/layer/merging/sealer.go @@ -0,0 +1,173 @@ +package merging + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/tag" +) + +type Sealer struct { + DatasetGraphLoader dataset.GraphLoader + TagLoader tag.Loader +} + +func (s *Sealer) Seal(ctx context.Context, m MergedLayer) (SealedLayer, error) { + if s == nil || m == nil { + return nil, nil + } + + var tagMap tag.Map + if tags := m.AllTags(); len(tags) > 0 { + tags2, err := s.TagLoader(ctx, tags...) 
+ if err != nil { + return nil, err + } + tagMap = tag.MapFromRefList(tags2) + } + + return s.sealLayer(ctx, m, tagMap) +} + +func (s *Sealer) sealLayer(ctx context.Context, m MergedLayer, tagMap tag.Map) (SealedLayer, error) { + if s == nil || m == nil { + return nil, nil + } + if g, ok := m.(*MergedLayerGroup); ok { + return s.sealLayerGroup(ctx, g, tagMap) + } + if i, ok := m.(*MergedLayerItem); ok { + return s.sealLayerItem(ctx, i, tagMap) + } + return nil, nil +} + +func (s *Sealer) sealLayerGroup(ctx context.Context, m *MergedLayerGroup, tagMap tag.Map) (*SealedLayerGroup, error) { + if s == nil || m == nil { + return nil, nil + } + + c, err := s.sealLayerCommon(ctx, &m.MergedLayerCommon, tagMap) + if err != nil { + return nil, err + } + if c == nil { + return nil, nil + } + + children := make([]SealedLayer, 0, len(m.Children)) + for _, c := range m.Children { + s, err := s.sealLayer(ctx, c, tagMap) + if err != nil { + return nil, err + } + children = append(children, s) + } + + return &SealedLayerGroup{ + SealedLayerCommon: *c, + Children: children, + }, nil +} + +func (s *Sealer) sealLayerItem(ctx context.Context, m *MergedLayerItem, tagMap tag.Map) (*SealedLayerItem, error) { + if s == nil || m == nil { + return nil, nil + } + c, err := s.sealLayerCommon(ctx, &m.MergedLayerCommon, tagMap) + if err != nil { + return nil, err + } + if c == nil { + return nil, nil + } + return &SealedLayerItem{ + SealedLayerCommon: *c, + }, nil +} + +func (s *Sealer) sealLayerCommon(ctx context.Context, m *MergedLayerCommon, tagMap tag.Map) (*SealedLayerCommon, error) { + if s == nil || m == nil { + return nil, nil + } + p, err := s.sealProperty(ctx, m.Property) + if err != nil { + return nil, err + } + ib, err := s.sealInfobox(ctx, m.Infobox) + if err != nil { + return nil, err + } + tags := s.sealTags(m.Merged.Tags, tagMap) + return &SealedLayerCommon{ + Merged: m.Merged, + Property: p, + Infobox: ib, + Tags: tags, + }, nil +} + +func (s *Sealer) sealInfobox(ctx 
context.Context, m *MergedInfobox) (*SealedInfobox, error) { + if s == nil || m == nil { + return nil, nil + } + p, err := s.sealProperty(ctx, m.Property) + if err != nil { + return nil, err + } + fields := make([]*SealedInfoboxField, 0, len(m.Fields)) + for _, f := range m.Fields { + s, err := s.sealInfoboxField(ctx, f) + if err != nil { + return nil, err + } + fields = append(fields, s) + } + return &SealedInfobox{ + MergedInfobox: m.MergedInfobox, + Property: p, + Fields: fields, + }, nil +} + +func (s *Sealer) sealInfoboxField(ctx context.Context, m *MergedInfoboxField) (*SealedInfoboxField, error) { + if s == nil || m == nil { + return nil, nil + } + p, err := s.sealProperty(ctx, m.Property) + if err != nil { + return nil, err + } + return &SealedInfoboxField{ + MergedInfoboxField: m.MergedInfoboxField, + Property: p, + }, nil +} + +func (s *Sealer) sealProperty(ctx context.Context, m *property.Merged) (*property.Sealed, error) { + if s == nil { + return nil, nil + } + return property.Seal(ctx, m, s.DatasetGraphLoader) +} + +func (s *Sealer) sealTags(m []layer.MergedTag, tagMap tag.Map) []SealedTag { + if len(m) == 0 { + return nil + } + res := make([]SealedTag, 0, len(m)) + for _, t := range m { + tt := SealedTag{ + ID: t.ID, + Tags: s.sealTags(t.Tags, tagMap), + Label: "", + } + if ttt, ok := tagMap[t.ID]; ok { + tt.Label = ttt.Label() + } + res = append(res, tt) + } + return res +} diff --git a/server/pkg/layer/tag.go b/server/pkg/layer/tag.go new file mode 100644 index 000000000..715886f44 --- /dev/null +++ b/server/pkg/layer/tag.go @@ -0,0 +1,220 @@ +package layer + +type TagList struct { + tags []Tag +} + +type Tag interface { + ID() TagID + Clone() Tag +} + +type TagItem struct { + id TagID +} + +type TagGroup struct { + id TagID + children []*TagItem +} + +func NewTagItem(t TagID) *TagItem { + if t.IsNil() { + return nil + } + return &TagItem{ + id: t, + } +} + +func (t *TagItem) ID() TagID { + if t == nil { + return TagID{} + } + return t.id +} + 
+func TagItemFrom(t Tag) *TagItem { + t2, _ := t.(*TagItem) + return t2 +} + +func (t *TagItem) Clone() Tag { + return t.CloneItem() +} + +func (t *TagItem) CloneItem() *TagItem { + if t == nil { + return nil + } + return NewTagItem(t.id) +} + +func NewTagGroup(t TagID, children []*TagItem) *TagGroup { + if t.IsNil() { + return nil + } + return &TagGroup{ + id: t, + children: append(children[:0:0], children...), + } +} + +func TagGroupFrom(t Tag) *TagGroup { + t2, _ := t.(*TagGroup) + return t2 +} + +func (t *TagGroup) ID() TagID { + if t == nil { + return TagID{} + } + return t.id +} + +func (t *TagGroup) Children() []*TagItem { + if t == nil { + return nil + } + return append(t.children[:0:0], t.children...) +} + +func (t *TagGroup) Find(ti TagID) *TagItem { + if t == nil { + return nil + } + for _, tag := range t.children { + if tag.ID() == ti { + return tag + } + } + return nil +} + +func (t *TagGroup) Add(ti *TagItem) bool { + if t == nil || ti == nil || t.Find(ti.ID()) != nil { + return false + } + t.children = append(t.children, ti) + return true +} + +func (t *TagGroup) Delete(ti TagID) (res bool) { + if t == nil { + return + } + for i := 0; i < len(t.children); i++ { + c := t.children[i] + if c.ID() == ti { + t.children = append(t.children[:i], t.children[i+1:]...) + i-- + res = true + } + } + return +} + +func (t *TagGroup) Clone() Tag { + return t.CloneGroup() +} + +func (t *TagGroup) CloneGroup() *TagGroup { + if t == nil { + return nil + } + return NewTagGroup(t.id, t.children) +} + +func NewTagList(tags []Tag) *TagList { + return &TagList{tags: append(tags[:0:0], tags...)} +} + +func (t *TagList) Tags() []Tag { + if t == nil { + return nil + } + return append(t.tags[:0:0], t.tags...) 
+} + +func (t *TagList) Add(ti Tag) bool { + if t == nil || ti == nil || t.Has(ti.ID()) || TagItemFrom(ti) == nil && TagGroupFrom(ti) == nil { + return false + } + t.tags = append(t.tags, ti) + return true +} + +func (t *TagList) Delete(ti TagID) (res bool) { + if t == nil { + return + } + for i := 0; i < len(t.tags); i++ { + c := t.tags[i] + if c.ID() == ti { + t.tags = append(t.tags[:i], t.tags[i+1:]...) + i-- + res = true + } else if TagGroupFrom(c).Delete(ti) { + res = true + } + } + return +} + +func (t *TagList) Has(ti TagID) bool { + g, i := t.Find(ti) + return g != nil || i != nil +} + +func (t *TagList) Find(ti TagID) (*TagGroup, *TagItem) { + if t == nil { + return nil, nil + } + for _, t := range t.tags { + g := TagGroupFrom(t) + if t.ID() == ti { + return g, TagItemFrom(t) + } + if i := g.Find(ti); i != nil { + return g, i + } + } + return nil, nil +} + +func (t *TagList) FindItem(ti TagID) *TagItem { + _, i := t.Find(ti) + return i +} + +func (t *TagList) FindGroup(ti TagID) *TagGroup { + g, i := t.Find(ti) + if i != nil { + return nil + } + return g +} + +func (t *TagList) RootItems() []*TagItem { + if t == nil { + return nil + } + items := make([]*TagItem, 0, len(t.tags)) + for _, t := range t.tags { + if i := TagItemFrom(t); i != nil { + items = append(items, i) + } + } + return items +} + +func (t *TagList) IsEmpty() bool { + return t == nil || len(t.tags) == 0 +} + +func (t *TagList) Clone() *TagList { + if t == nil { + return nil + } + return NewTagList(t.tags) +} diff --git a/server/pkg/layer/tag_test.go b/server/pkg/layer/tag_test.go new file mode 100644 index 000000000..09942fcb6 --- /dev/null +++ b/server/pkg/layer/tag_test.go @@ -0,0 +1,1103 @@ +package layer + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +var _ Tag = &TagItem{} +var _ Tag = &TagGroup{} + +func TestNewTagItem(t *testing.T) { + tag := NewTagID() + type args struct { + t TagID + } + + tests := []struct { + name string + args args + want *TagItem + }{ + { 
+ name: "ok", + args: args{t: tag}, + want: &TagItem{id: tag}, + }, + { + name: "nil id", + args: args{t: TagID{}}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, NewTagItem(tt.args.t)) + }) + } +} + +func TestTagItemFrom(t *testing.T) { + tag := NewTagID() + type args struct { + t Tag + } + + tests := []struct { + name string + args args + want *TagItem + }{ + { + name: "item", + args: args{t: &TagItem{id: tag}}, + want: &TagItem{id: tag}, + }, + { + name: "group", + args: args{t: &TagGroup{id: tag}}, + want: nil, + }, + { + name: "nil item", + args: args{t: (*TagItem)(nil)}, + want: nil, + }, + { + name: "nil group", + args: args{t: (*TagGroup)(nil)}, + want: nil, + }, + { + name: "nil", + args: args{t: nil}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, TagItemFrom(tt.args.t)) + }) + } +} + +func TestTagItem_ID(t *testing.T) { + tag := NewTagID() + + tests := []struct { + name string + target *TagItem + want TagID + }{ + { + name: "ok", + target: &TagItem{id: tag}, + want: tag, + }, + { + name: "nil", + target: nil, + want: TagID{}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.ID()) + }) + } +} + +func TestTagItem_Clone(t *testing.T) { + tag := NewTagID() + + tests := []struct { + name string + target *TagItem + }{ + { + name: "ok", + target: &TagItem{id: tag}, + }, + { + name: "nil", + target: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.Clone() + assert.Equal(t, tt.target, res) + if tt.target != nil { + assert.NotSame(t, tt.target, res) + } + }) + } +} + +func TestTagItem_CloneItem(t *testing.T) { + tag := NewTagID() + + tests := []struct { + name string + target *TagItem + }{ + { + name: 
"ok", + target: &TagItem{id: tag}, + }, + { + name: "nil", + target: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.CloneItem() + assert.Equal(t, tt.target, res) + if tt.target != nil { + assert.NotSame(t, tt.target, res) + } + }) + } +} + +func TestNewTagGroup(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + type args struct { + t TagID + children []*TagItem + } + + tests := []struct { + name string + args args + want *TagGroup + }{ + { + name: "ok", + args: args{ + t: tag1, + children: []*TagItem{ + {id: tag2}, + {id: tag3}, + }, + }, + want: &TagGroup{ + id: tag1, + children: []*TagItem{ + {id: tag2}, + {id: tag3}, + }, + }, + }, + { + name: "nil id", + args: args{t: TagID{}}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, NewTagGroup(tt.args.t, tt.args.children)) + }) + } +} + +func TestTagGroupFrom(t *testing.T) { + tag := NewTagID() + type args struct { + t Tag + } + + tests := []struct { + name string + args args + want *TagGroup + }{ + { + name: "group", + args: args{t: &TagGroup{id: tag}}, + want: &TagGroup{id: tag}, + }, + { + name: "item", + args: args{t: &TagItem{id: tag}}, + want: nil, + }, + { + name: "nil item", + args: args{t: (*TagItem)(nil)}, + want: nil, + }, + { + name: "nil group", + args: args{t: (*TagGroup)(nil)}, + want: nil, + }, + { + name: "nil", + args: args{t: nil}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, TagGroupFrom(tt.args.t)) + }) + } +} + +func TestTagGroup_ID(t *testing.T) { + tag := NewTagID() + + tests := []struct { + name string + target *TagGroup + want TagID + }{ + { + name: "ok", + target: &TagGroup{id: tag}, + want: tag, + }, + { + name: "nil", + target: nil, + want: TagID{}, + }, + } + + for _, tt := range tests 
{ + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.ID()) + }) + } +} + +func TestTagGroup_Children(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + + tests := []struct { + name string + target *TagGroup + want []*TagItem + }{ + { + name: "ok", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}}}, + want: []*TagItem{{id: tag2}}, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.Children() + assert.Equal(t, tt.want, res) + if tt.want != nil { + assert.NotSame(t, tt.target.children, res) + } + }) + } +} + +func TestTagGroup_Find(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + type args struct { + ti TagID + } + + tests := []struct { + name string + target *TagGroup + args args + want *TagItem + }{ + { + name: "ok", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}, {id: tag3}}}, + args: args{ti: tag2}, + want: &TagItem{id: tag2}, + }, + { + name: "not found", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}, {id: tag3}}}, + args: args{ti: tag1}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{ti: tag1}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.Find(tt.args.ti) + assert.Equal(t, tt.want, res) + }) + } +} + +func TestTagGroup_Add(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + type args struct { + ti *TagItem + } + + tests := []struct { + name string + target *TagGroup + args args + want bool + wantChildren []*TagItem + }{ + { + name: "ok", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}, + args: args{ti: &TagItem{id: tag2}}, + want: true, + wantChildren: []*TagItem{{id: tag3}, {id: tag2}}, + }, + { + name: "not added", + target: &TagGroup{id: tag1, 
children: []*TagItem{{id: tag2}, {id: tag3}}}, + args: args{ti: &TagItem{id: tag2}}, + want: false, + wantChildren: []*TagItem{{id: tag2}, {id: tag3}}, + }, + { + name: "nil item", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}}}, + args: args{ti: nil}, + wantChildren: []*TagItem{{id: tag2}}, + }, + { + name: "nil", + target: nil, + args: args{ti: &TagItem{id: tag2}}, + wantChildren: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Add(tt.args.ti)) + assert.Equal(t, tt.wantChildren, tt.target.Children()) + }) + } +} + +func TestTagGroup_Delete(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + type args struct { + ti TagID + } + + tests := []struct { + name string + target *TagGroup + args args + want bool + wantChildren []*TagItem + }{ + { + name: "ok", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}, {id: tag3}}}, + args: args{ti: tag2}, + want: true, + wantChildren: []*TagItem{{id: tag3}}, + }, + { + name: "not found", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}, {id: tag3}}}, + args: args{ti: tag1}, + want: false, + wantChildren: []*TagItem{{id: tag2}, {id: tag3}}, + }, + { + name: "nil", + target: nil, + args: args{ti: tag1}, + wantChildren: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Delete(tt.args.ti)) + assert.Equal(t, tt.wantChildren, tt.target.Children()) + }) + } +} + +func TestTagGroup_Clone(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + + tests := []struct { + name string + target *TagGroup + }{ + { + name: "ok", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}}}, + }, + { + name: "nil", + target: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.Clone() + assert.Equal(t, 
tt.target, res) + if tt.target != nil { + assert.NotSame(t, tt.target, res) + } + }) + } +} + +func TestTagGroup_CloneGroup(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + + tests := []struct { + name string + target *TagGroup + }{ + { + name: "ok", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}}}, + }, + { + name: "nil", + target: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.CloneGroup() + assert.Equal(t, tt.target, res) + if tt.target != nil { + assert.NotSame(t, tt.target, res) + if tt.target.children != nil { + assert.NotSame(t, tt.target.children, res.children) + } + } + }) + } +} + +func TestNewTagList(t *testing.T) { + tag := NewTagID() + + type args struct { + tags []Tag + } + + tests := []struct { + name string + args args + want *TagList + }{ + { + name: "ok", + args: args{tags: []Tag{&TagItem{id: tag}}}, + want: &TagList{tags: []Tag{&TagItem{id: tag}}}, + }, + { + name: "nil", + args: args{tags: nil}, + want: &TagList{tags: nil}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := NewTagList(tt.args.tags) + assert.Equal(t, tt.want, res) + assert.NotSame(t, res.tags, tt.args.tags) + }) + } +} + +func TestTagList_Tags(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + tests := []struct { + name string + target *TagList + want []Tag + }{ + { + name: "ok", + target: &TagList{ + tags: []Tag{&TagGroup{id: tag1, children: []*TagItem{{id: tag2}}}, &TagItem{id: tag3}}, + }, + want: []Tag{&TagGroup{id: tag1, children: []*TagItem{{id: tag2}}}, &TagItem{id: tag3}}, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.Tags() + assert.Equal(t, tt.want, res) + if tt.want != nil { + assert.NotSame(t, tt.target.tags, res) + } + }) + } +} + +func 
TestTagList_Add(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + type args struct { + ti Tag + } + + tests := []struct { + name string + target *TagList + args args + want bool + wantChildren []Tag + }{ + { + name: "item added", + target: &TagList{ + tags: []Tag{ + &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: &TagItem{id: tag2}}, + want: true, + wantChildren: []Tag{ + &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}, + &TagItem{id: tag2}, + }, + }, + { + name: "group added", + target: &TagList{ + tags: []Tag{ + &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: &TagGroup{id: tag2}}, + want: true, + wantChildren: []Tag{ + &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}, + &TagGroup{id: tag2}, + }, + }, + { + name: "not added", + target: &TagList{ + tags: []Tag{&TagItem{id: tag2}, &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}}, + }, + args: args{ti: &TagGroup{id: tag2}}, + want: false, + wantChildren: []Tag{ + &TagItem{id: tag2}, + &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}, + }, + }, + { + name: "nil tag", + target: &TagList{ + tags: []Tag{&TagItem{id: tag2}, &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}}, + }, + args: args{ti: nil}, + want: false, + wantChildren: []Tag{&TagItem{id: tag2}, &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}}, + }, + { + name: "nil item tag", + target: &TagList{ + tags: []Tag{&TagItem{id: tag2}, &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}}, + }, + args: args{ti: (*TagItem)(nil)}, + want: false, + wantChildren: []Tag{&TagItem{id: tag2}, &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}}, + }, + { + name: "nil", + args: args{ti: &TagGroup{id: tag2}}, + target: nil, + wantChildren: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Add(tt.args.ti)) + assert.Equal(t, tt.wantChildren, 
tt.target.Tags()) + }) + } +} + +func TestTagList_Delete(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + tag4 := NewTagID() + + type args struct { + ti TagID + } + + tests := []struct { + name string + target *TagList + args args + want bool + wantTags []Tag + }{ + { + name: "ok", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag1}, + want: true, + wantTags: []Tag{ + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + { + name: "not found", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag4}, + want: false, + wantTags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + { + name: "nil", + target: nil, + args: args{ti: tag1}, + wantTags: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Delete(tt.args.ti)) + assert.Equal(t, tt.wantTags, tt.target.Tags()) + }) + } +} + +func TestTagList_Find(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + tag4 := NewTagID() + + type args struct { + ti TagID + } + + tests := []struct { + name string + target *TagList + args args + wantGroup *TagGroup + wantItem *TagItem + }{ + { + name: "group", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag2}, + wantGroup: &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + wantItem: nil, + }, + { + name: "item", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag3}, + wantGroup: &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + wantItem: &TagItem{id: tag3}, + }, + { + name: "root item", + 
target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag1}, + wantGroup: nil, + wantItem: &TagItem{id: tag1}, + }, + { + name: "not found", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag4}, + wantGroup: nil, + wantItem: nil, + }, + { + name: "nil", + target: nil, + args: args{ti: tag1}, + wantGroup: nil, + wantItem: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + resGroup, resItem := tt.target.Find(tt.args.ti) + assert.Equal(t, tt.wantGroup, resGroup) + assert.Equal(t, tt.wantItem, resItem) + }) + } +} + +func TestTagList_FindGroup(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + type args struct { + ti TagID + } + + tests := []struct { + name string + target *TagList + args args + want *TagGroup + }{ + { + name: "ok", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag2}, + want: &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + { + name: "not found", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag1}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{ti: tag1}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.FindGroup(tt.args.ti)) + }) + } +} + +func TestTagList_FindItem(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + type args struct { + ti TagID + } + + tests := []struct { + name string + target *TagList + args args + want *TagItem + }{ + { + name: "ok", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, 
children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag3}, + want: &TagItem{id: tag3}, + }, + { + name: "root item", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag1}, + want: &TagItem{id: tag1}, + }, + { + name: "not found", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag2}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{ti: tag1}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.FindItem(tt.args.ti)) + }) + } +} + +func TestTagList_RootItems(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + tests := []struct { + name string + target *TagList + want []*TagItem + }{ + { + name: "ok", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + want: []*TagItem{{id: tag1}}, + }, + { + name: "no roots", + target: &TagList{ + tags: []Tag{ + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + want: []*TagItem{}, + }, + { + name: "empty", + target: &TagList{}, + want: []*TagItem{}, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.RootItems()) + }) + } +} + +func TestTagList_IsEmpty(t *testing.T) { + tag := NewTagID() + + tests := []struct { + name string + target *TagList + want bool + }{ + { + name: "ok", + target: &TagList{ + tags: []Tag{&TagItem{id: tag}}, + }, + want: false, + }, + { + name: "empty", + target: &TagList{}, + want: true, + }, + { + name: "nil", + target: nil, + want: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + 
assert.Equal(t, tt.want, tt.target.IsEmpty()) + }) + } +} + +func TestTagList_Clone(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + tests := []struct { + name string + target *TagList + }{ + { + name: "ok", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + }, + { + name: "nil", + target: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.Clone() + assert.Equal(t, tt.target, res) + if tt.target != nil { + assert.NotSame(t, tt.target, res) + } + }) + } +} diff --git a/server/pkg/log/gceformatter.go b/server/pkg/log/gceformatter.go new file mode 100644 index 000000000..0df77cc26 --- /dev/null +++ b/server/pkg/log/gceformatter.go @@ -0,0 +1,123 @@ +package log + +// https://github.com/znly/logrus-gce with some modifications +// Apache License 2.0 + +import ( + "encoding/json" + "errors" + "fmt" + "runtime" + "strings" + "sync" + "time" + + "github.com/sirupsen/logrus" +) + +type severity string + +const ( + severityDEBUG severity = "DEBUG" + severityINFO severity = "INFO" + severityWARNING severity = "WARNING" + severityERROR severity = "ERROR" + severityCRITICAL severity = "CRITICAL" + severityALERT severity = "ALERT" +) + +var ( + levelsLogrusToGCE = map[logrus.Level]severity{ + logrus.DebugLevel: severityDEBUG, + logrus.InfoLevel: severityINFO, + logrus.WarnLevel: severityWARNING, + logrus.ErrorLevel: severityERROR, + logrus.FatalLevel: severityCRITICAL, + logrus.PanicLevel: severityALERT, + } +) + +var ( + stackSkips = map[logrus.Level]int{} + stackSkipsMu = sync.RWMutex{} +) + +var ( + ErrSkipNotFound = errors.New("could not find skips for log level") +) + +func getSkipLevel(level logrus.Level) (int, error) { + stackSkipsMu.RLock() + if skip, ok := stackSkips[level]; ok { + defer stackSkipsMu.RUnlock() + return skip, nil + } + stackSkipsMu.RUnlock() + + stackSkipsMu.Lock() + 
defer stackSkipsMu.Unlock() + if skip, ok := stackSkips[level]; ok { + return skip, nil + } + + // detect until we escape logrus back to the client package + // skip out of runtime and logrusgce package, hence 3 + stackSkipsCallers := make([]uintptr, 20) + runtime.Callers(3, stackSkipsCallers) + for i, pc := range stackSkipsCallers { + f := runtime.FuncForPC(pc) + if strings.HasPrefix(f.Name(), "github.com/sirupsen/logrus") { + continue + } + stackSkips[level] = i + 1 + return i + 1, nil + } + return 0, ErrSkipNotFound +} + +type GCEFormatter struct { + withSourceInfo bool +} + +func NewGCEFormatter(withSourceInfo bool) *GCEFormatter { + return &GCEFormatter{withSourceInfo: withSourceInfo} +} + +func (f *GCEFormatter) Format(entry *logrus.Entry) ([]byte, error) { + data := make(logrus.Fields, len(entry.Data)+3) + for k, v := range entry.Data { + switch v := v.(type) { + case error: + // Otherwise errors are ignored by `encoding/json` + // https://github.com/Sirupsen/logrus/issues/137 + data[k] = v.Error() + default: + data[k] = v + } + } + + data["time"] = entry.Time.Format(time.RFC3339Nano) + data["severity"] = levelsLogrusToGCE[entry.Level] + data["logMessage"] = entry.Message + + if f.withSourceInfo { + skip, err := getSkipLevel(entry.Level) + if err != nil { + return nil, err + } + if pc, file, line, ok := runtime.Caller(skip); ok { + f := runtime.FuncForPC(pc) + data["sourceLocation"] = map[string]interface{}{ + "file": file, + "line": line, + "functionName": f.Name(), + } + } + } + + serialized, err := json.Marshal(data) + if err != nil { + return nil, fmt.Errorf("Failed to marshal fields to JSON, %v", err) + } + return append(serialized, '\n'), nil +} diff --git a/server/pkg/log/log.go b/server/pkg/log/log.go new file mode 100644 index 000000000..a42a6f002 --- /dev/null +++ b/server/pkg/log/log.go @@ -0,0 +1,108 @@ +package log + +import ( + "os" + + "github.com/sirupsen/logrus" +) + +func init() { + gcp, _ := os.LookupEnv("GOOGLE_CLOUD_PROJECT") + 
logrus.SetLevel(logrus.InfoLevel) + if gcp != "" { + logrus.SetFormatter(NewGCEFormatter(false)) + } else { + logrus.SetFormatter(&logrus.TextFormatter{ + DisableColors: false, + FullTimestamp: true, + }) + } +} + +func Tracef(format string, args ...interface{}) { + logrus.Tracef(format, args...) +} + +func Debugf(format string, args ...interface{}) { + logrus.Debugf(format, args...) +} + +func Infof(format string, args ...interface{}) { + logrus.Infof(format, args...) +} + +func Printf(format string, args ...interface{}) { + logrus.Printf(format, args...) +} + +func Warnf(format string, args ...interface{}) { + logrus.Warnf(format, args...) +} + +func Errorf(format string, args ...interface{}) { + logrus.Errorf(format, args...) +} + +func Fatalf(format string, args ...interface{}) { + logrus.Fatalf(format, args...) +} + +func Trace(args ...interface{}) { + logrus.Trace(args...) +} + +func Debug(args ...interface{}) { + logrus.Debug(args...) +} + +func Info(args ...interface{}) { + logrus.Info(args...) +} + +func Print(args ...interface{}) { + logrus.Print(args...) +} + +func Warn(args ...interface{}) { + logrus.Warn(args...) +} + +func Error(args ...interface{}) { + logrus.Error(args...) +} + +func Fatal(args ...interface{}) { + logrus.Fatal(args...) +} + +func Traceln(args ...interface{}) { + logrus.Traceln(args...) +} + +func Debugln(args ...interface{}) { + logrus.Debugln(args...) +} + +func Infoln(args ...interface{}) { + logrus.Infoln(args...) +} + +func Println(args ...interface{}) { + logrus.Println(args...) +} + +func Warnln(args ...interface{}) { + logrus.Warnln(args...) +} + +func Errorln(args ...interface{}) { + logrus.Errorln(args...) +} + +func Fatalln(args ...interface{}) { + logrus.Fatalln(args...) +} + +func Panicf(format string, args ...interface{}) { + logrus.Panicf(format, args...) 
+} diff --git a/server/pkg/plugin/builder.go b/server/pkg/plugin/builder.go new file mode 100644 index 000000000..1a3364df7 --- /dev/null +++ b/server/pkg/plugin/builder.go @@ -0,0 +1,74 @@ +package plugin + +import ( + "github.com/reearth/reearth-backend/pkg/i18n" +) + +type Builder struct { + p *Plugin +} + +func New() *Builder { + return &Builder{p: &Plugin{}} +} + +func (b *Builder) Build() (*Plugin, error) { + if b.p.id.IsNil() { + return nil, ErrInvalidID + } + return b.p, nil +} + +func (b *Builder) MustBuild() *Plugin { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +func (b *Builder) ID(id ID) *Builder { + b.p.id = id + return b +} + +func (b *Builder) Name(name i18n.String) *Builder { + b.p.name = name.Clone() + return b +} + +func (b *Builder) Author(author string) *Builder { + b.p.author = author + return b +} + +func (b *Builder) Description(description i18n.String) *Builder { + b.p.description = description.Clone() + return b +} + +func (b *Builder) RepositoryURL(repositoryURL string) *Builder { + b.p.repositoryURL = repositoryURL + return b +} + +func (b *Builder) Extensions(extensions []*Extension) *Builder { + if len(extensions) == 0 { + b.p.extensions = nil + b.p.extensionOrder = nil + return b + } + + b.p.extensions = make(map[ExtensionID]*Extension, len(extensions)) + b.p.extensionOrder = make([]ExtensionID, 0, len(extensions)) + for _, e := range extensions { + b.p.extensions[e.ID()] = e + b.p.extensionOrder = append(b.p.extensionOrder, e.ID()) + } + return b +} + +func (b *Builder) Schema(schema *PropertySchemaID) *Builder { + b.p.schema = schema.CopyRef() + return b +} diff --git a/server/pkg/plugin/builder_test.go b/server/pkg/plugin/builder_test.go new file mode 100644 index 000000000..46ea26a3e --- /dev/null +++ b/server/pkg/plugin/builder_test.go @@ -0,0 +1,218 @@ +package plugin + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/stretchr/testify/assert" +) + +func 
TestBuilder_ID(t *testing.T) { + var b = New() + res := b.ID(MustID("aaa~1.1.1")).MustBuild() + assert.Equal(t, MustID("aaa~1.1.1"), res.ID()) +} + +func TestBuilder_Name(t *testing.T) { + var b = New() + res := b.ID(MustID("aaa~1.1.1")).Name(i18n.StringFrom("fooo")).MustBuild() + assert.Equal(t, i18n.StringFrom("fooo"), res.Name()) +} + +func TestBuilder_Author(t *testing.T) { + var b = New() + res := b.ID(MustID("aaa~1.1.1")).Author("xxx").MustBuild() + assert.Equal(t, "xxx", res.Author()) +} + +func TestBuilder_Description(t *testing.T) { + var b = New() + res := b.ID(MustID("aaa~1.1.1")).Description(i18n.StringFrom("ddd")).MustBuild() + assert.Equal(t, i18n.StringFrom("ddd"), res.Description()) +} + +func TestBuilder_Schema(t *testing.T) { + tests := []struct { + name string + sid, expected *PropertySchemaID + }{ + { + name: "nil schema", + sid: nil, + expected: nil, + }, + { + name: "build schema", + sid: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), + expected: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := New().ID(MustID("aaa~1.1.1")).Schema(tt.sid).MustBuild() + assert.Equal(t, tt.expected, res.Schema()) + }) + } +} + +func TestBuilder_Extensions(t *testing.T) { + b := New() + ext := []*Extension{ + NewExtension().ID("xxx").MustBuild(), + NewExtension().ID("yyy").MustBuild(), + } + res := b.ID(MustID("aaa~1.1.1")).Extensions(ext).MustBuild() + assert.Equal(t, ext, res.Extensions()) +} + +func TestBuilder_RepositoryURL(t *testing.T) { + var b = New() + res := b.ID(MustID("aaa~1.1.1")).RepositoryURL("hoge").MustBuild() + assert.Equal(t, "hoge", res.RepositoryURL()) +} + +func TestBuilder_Build(t *testing.T) { + type args struct { + id ID + author, repositoryURL string + pname, description i18n.String + ext []*Extension + schema *PropertySchemaID + } + + tests := []struct { + name string + args args + expected *Plugin + err error // skip for now as 
error is always nil + }{ + { + name: "success build new plugin", + args: args{ + id: MustID("hoge~0.1.0"), + author: "aaa", + repositoryURL: "uuu", + pname: i18n.StringFrom("nnn"), + description: i18n.StringFrom("ddd"), + ext: []*Extension{ + NewExtension().ID("xxx").MustBuild(), + NewExtension().ID("yyy").MustBuild(), + }, + schema: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), + }, + expected: &Plugin{ + id: MustID("hoge~0.1.0"), + name: i18n.StringFrom("nnn"), + author: "aaa", + description: i18n.StringFrom("ddd"), + repositoryURL: "uuu", + extensions: map[ExtensionID]*Extension{ + ExtensionID("xxx"): NewExtension().ID("xxx").MustBuild(), + ExtensionID("yyy"): NewExtension().ID("yyy").MustBuild(), + }, + extensionOrder: []ExtensionID{ExtensionID("xxx"), ExtensionID("yyy")}, + schema: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + p, err := New(). + ID(tt.args.id). + Extensions(tt.args.ext). + RepositoryURL(tt.args.repositoryURL). + Description(tt.args.description). + Name(tt.args.pname). + Schema(tt.args.schema). + Author(tt.args.author). 
+ Build() + if tt.err == nil { + assert.Equal(t, tt.expected, p) + } else { + assert.Equal(t, tt.err, err) + } + }) + } +} + +func TestBuilder_MustBuild(t *testing.T) { + type args struct { + author, repositoryURL string + id ID + pname, description i18n.String + ext []*Extension + schema *PropertySchemaID + } + + tests := []struct { + name string + args args + expected *Plugin + err error // skip for now as error is always nil + }{ + { + name: "success build new plugin", + args: args{ + id: MustID("hoge~0.1.0"), + author: "aaa", + repositoryURL: "uuu", + pname: i18n.StringFrom("nnn"), + description: i18n.StringFrom("ddd"), + ext: []*Extension{ + NewExtension().ID("xxx").MustBuild(), + NewExtension().ID("yyy").MustBuild(), + }, + schema: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), + }, + expected: &Plugin{ + id: MustID("hoge~0.1.0"), + name: i18n.StringFrom("nnn"), + author: "aaa", + description: i18n.StringFrom("ddd"), + repositoryURL: "uuu", + extensions: map[ExtensionID]*Extension{ + ExtensionID("xxx"): NewExtension().ID("xxx").MustBuild(), + ExtensionID("yyy"): NewExtension().ID("yyy").MustBuild(), + }, + extensionOrder: []ExtensionID{ExtensionID("xxx"), ExtensionID("yyy")}, + schema: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + build := func() *Plugin { + t.Helper() + return New(). + ID(tt.args.id). + Extensions(tt.args.ext). + RepositoryURL(tt.args.repositoryURL). + Description(tt.args.description). + Name(tt.args.pname). + Schema(tt.args.schema). + Author(tt.args.author). 
+ MustBuild() + } + + if tt.err != nil { + assert.PanicsWithValue(t, tt.err, func() { _ = build() }) + } else { + assert.Equal(t, tt.expected, build()) + } + }) + } +} + +func TestNew(t *testing.T) { + assert.NotNil(t, New()) +} diff --git a/server/pkg/plugin/extension.go b/server/pkg/plugin/extension.go new file mode 100644 index 000000000..083b8ae12 --- /dev/null +++ b/server/pkg/plugin/extension.go @@ -0,0 +1,97 @@ +package plugin + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +type ExtensionType string + +var ( + ErrPluginExtensionDuplicated error = errors.New("plugin extension duplicated") + ExtensionTypePrimitive ExtensionType = "primitive" + ExtensionTypeWidget ExtensionType = "widget" + ExtensionTypeBlock ExtensionType = "block" + ExtensionTypeVisualizer ExtensionType = "visualizer" + ExtensionTypeInfobox ExtensionType = "infobox" + ExtensionTypeCluster ExtensionType = "cluster" +) + +type Extension struct { + id ExtensionID + extensionType ExtensionType + name i18n.String + description i18n.String + icon string + schema PropertySchemaID + visualizer visualizer.Visualizer + singleOnly bool + widgetLayout *WidgetLayout +} + +func (w *Extension) ID() ExtensionID { + return w.id +} + +func (w *Extension) Type() ExtensionType { + return w.extensionType +} + +func (w *Extension) Name() i18n.String { + return w.name.Clone() +} + +func (w *Extension) Description() i18n.String { + return w.description.Clone() +} + +func (w *Extension) Icon() string { + return w.icon +} + +func (w *Extension) Schema() PropertySchemaID { + return w.schema +} + +func (w *Extension) Visualizer() visualizer.Visualizer { + return w.visualizer +} + +func (w *Extension) SingleOnly() bool { + return w.singleOnly +} + +func (w *Extension) WidgetLayout() *WidgetLayout { + if w == nil { + return nil + } + return w.widgetLayout +} + +func (w *Extension) Rename(name i18n.String) { + w.name = name.Clone() + +} + 
+func (w *Extension) SetDescription(des i18n.String) { + w.description = des.Clone() +} + +func (w *Extension) Clone() *Extension { + if w == nil { + return nil + } + return &Extension{ + id: w.id, + extensionType: w.extensionType, + name: w.name.Clone(), + description: w.description.Clone(), + icon: w.icon, + schema: w.schema.Clone(), + visualizer: w.visualizer, + singleOnly: w.singleOnly, + widgetLayout: w.widgetLayout.Clone(), + } +} diff --git a/server/pkg/plugin/extension_builder.go b/server/pkg/plugin/extension_builder.go new file mode 100644 index 000000000..a28142b2b --- /dev/null +++ b/server/pkg/plugin/extension_builder.go @@ -0,0 +1,87 @@ +package plugin + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +type ExtensionBuilder struct { + p *Extension + s bool +} + +func NewExtension() *ExtensionBuilder { + return &ExtensionBuilder{p: &Extension{}} +} + +func (b *ExtensionBuilder) Build() (*Extension, error) { + if string(b.p.id) == "" { + return nil, ErrInvalidID + } + if !b.s { + if b.p.extensionType == ExtensionTypeVisualizer || b.p.extensionType == ExtensionTypeInfobox { + return nil, errors.New("cannot build system extension") + } + } + return b.p, nil +} + +func (b *ExtensionBuilder) MustBuild() *Extension { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +func (b *ExtensionBuilder) ID(id ExtensionID) *ExtensionBuilder { + b.p.id = id + return b +} + +func (b *ExtensionBuilder) Name(name i18n.String) *ExtensionBuilder { + b.p.name = name.Clone() + return b +} + +func (b *ExtensionBuilder) Type(extensionType ExtensionType) *ExtensionBuilder { + b.p.extensionType = extensionType + return b +} + +func (b *ExtensionBuilder) Description(description i18n.String) *ExtensionBuilder { + b.p.description = description.Clone() + return b +} + +func (b *ExtensionBuilder) Icon(icon string) *ExtensionBuilder { + b.p.icon = icon + return b +} + +func (b 
*ExtensionBuilder) Schema(schema PropertySchemaID) *ExtensionBuilder { + b.p.schema = schema + return b +} + +func (b *ExtensionBuilder) Visualizer(visualizer visualizer.Visualizer) *ExtensionBuilder { + b.p.visualizer = visualizer + return b +} + +func (b *ExtensionBuilder) SingleOnly(singleOnly bool) *ExtensionBuilder { + b.p.singleOnly = singleOnly + return b +} + +func (b *ExtensionBuilder) WidgetLayout(widgetLayout *WidgetLayout) *ExtensionBuilder { + b.p.widgetLayout = widgetLayout + return b +} + +func (b *ExtensionBuilder) System(s bool) *ExtensionBuilder { + b.s = s + return b +} diff --git a/server/pkg/plugin/extension_builder_test.go b/server/pkg/plugin/extension_builder_test.go new file mode 100644 index 000000000..03d7446d4 --- /dev/null +++ b/server/pkg/plugin/extension_builder_test.go @@ -0,0 +1,271 @@ +package plugin + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/visualizer" + "github.com/stretchr/testify/assert" +) + +func TestExtensionBuilder_Name(t *testing.T) { + var b = NewExtension() + res := b.ID("xxx").Name(i18n.StringFrom("nnn")).MustBuild() + assert.Equal(t, i18n.StringFrom("nnn"), res.Name()) +} + +func TestExtensionBuilder_Description(t *testing.T) { + var b = NewExtension() + res := b.ID("xxx").Description(i18n.StringFrom("ddd")).MustBuild() + assert.Equal(t, i18n.StringFrom("ddd"), res.Description()) +} + +func TestExtensionBuilder_ID(t *testing.T) { + var b = NewExtension() + res := b.ID("xxx").MustBuild() + assert.Equal(t, ExtensionID("xxx"), res.ID()) +} + +func TestExtensionBuilder_Type(t *testing.T) { + var b = NewExtension() + res := b.ID("xxx").Type("ppp").MustBuild() + assert.Equal(t, ExtensionType("ppp"), res.Type()) +} + +func TestExtensionBuilder_Icon(t *testing.T) { + var b = NewExtension() + res := b.ID("xxx").Icon("ccc").MustBuild() + assert.Equal(t, "ccc", res.Icon()) +} + +func TestExtensionBuilder_SingleOnly(t *testing.T) { + var b = NewExtension() + 
res := b.ID("xxx").SingleOnly(true).MustBuild() + assert.Equal(t, true, res.SingleOnly()) +} + +func TestExtensionBuilder_Schema(t *testing.T) { + var b = NewExtension() + res := b.ID("xxx").Schema(MustPropertySchemaID("hoge~0.1.0/fff")).MustBuild() + assert.Equal(t, MustPropertySchemaID("hoge~0.1.0/fff"), res.Schema()) +} + +func TestExtensionBuilder_Visualizer(t *testing.T) { + var b = NewExtension() + res := b.ID("xxx").Visualizer("ccc").MustBuild() + assert.Equal(t, visualizer.Visualizer("ccc"), res.Visualizer()) +} + +func TestExtensionBuilder_WidgetLayout(t *testing.T) { + var b = NewExtension() + wl := NewWidgetLayout( + false, true, false, false, nil, + ) + res := b.ID("xxx").WidgetLayout(&wl).MustBuild() + assert.Same(t, &wl, res.WidgetLayout()) +} + +func TestExtensionBuilder_Build(t *testing.T) { + type args struct { + icon string + id ExtensionID + extensionType ExtensionType + system bool + ename i18n.String + description i18n.String + schema PropertySchemaID + visualizer visualizer.Visualizer + widgetLayout *WidgetLayout + } + + tests := []struct { + name string + args args + expected *Extension + err error + }{ + { + name: "success not system", + args: args{ + icon: "ttt", + id: "xxx", + extensionType: "ppp", + system: false, + ename: i18n.StringFrom("nnn"), + description: i18n.StringFrom("ddd"), + schema: MustPropertySchemaID("foo~1.1.1/hhh"), + visualizer: "vvv", + widgetLayout: NewWidgetLayout( + false, false, true, false, &WidgetLocation{ + Zone: WidgetZoneOuter, + Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }, + ).Ref(), + }, + expected: &Extension{ + id: "xxx", + extensionType: "ppp", + name: i18n.StringFrom("nnn"), + description: i18n.StringFrom("ddd"), + icon: "ttt", + schema: MustPropertySchemaID("foo~1.1.1/hhh"), + visualizer: "vvv", + widgetLayout: NewWidgetLayout( + false, false, true, false, &WidgetLocation{ + Zone: WidgetZoneOuter, + Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }, + ).Ref(), + }, + }, + { + name: "fail 
not system type visualizer", + args: args{ + extensionType: ExtensionTypeVisualizer, + }, + err: ErrInvalidID, + }, + { + name: "fail not system type infobox", + args: args{ + extensionType: ExtensionTypeInfobox, + }, + err: ErrInvalidID, + }, + { + name: "fail nil id", + err: ErrInvalidID, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + e, err := NewExtension(). + ID(tc.args.id). + Visualizer(tc.args.visualizer). + Schema(tc.args.schema). + System(tc.args.system). + Type(tc.args.extensionType). + Description(tc.args.description). + Name(tc.args.ename). + Icon(tc.args.icon). + WidgetLayout(tc.args.widgetLayout). + Build() + if tc.err == nil { + assert.Equal(t, tc.expected, e) + } else { + assert.Equal(t, tc.err, err) + } + }) + } +} + +func TestExtensionBuilder_MustBuild(t *testing.T) { + type args struct { + icon string + id ExtensionID + extensionType ExtensionType + system bool + ename i18n.String + description i18n.String + schema PropertySchemaID + visualizer visualizer.Visualizer + widgetLayout *WidgetLayout + singleOnly bool + } + + tests := []struct { + name string + args args + expected *Extension + err error + }{ + { + name: "success not system", + args: args{ + icon: "ttt", + id: "xxx", + extensionType: "ppp", + system: false, + ename: i18n.StringFrom("nnn"), + description: i18n.StringFrom("ddd"), + schema: MustPropertySchemaID("foo~1.1.1/hhh"), + visualizer: "vvv", + singleOnly: true, + widgetLayout: NewWidgetLayout( + false, false, true, false, &WidgetLocation{ + Zone: WidgetZoneOuter, + Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }).Ref(), + }, + expected: &Extension{ + id: "xxx", + extensionType: "ppp", + name: i18n.StringFrom("nnn"), + description: i18n.StringFrom("ddd"), + icon: "ttt", + schema: MustPropertySchemaID("foo~1.1.1/hhh"), + visualizer: "vvv", + singleOnly: true, + widgetLayout: NewWidgetLayout( + false, false, true, false, &WidgetLocation{ + Zone: WidgetZoneOuter, + 
Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }).Ref(), + }, + }, + { + name: "fail not system type visualizer", + args: args{ + extensionType: ExtensionTypeVisualizer, + }, + err: ErrInvalidID, + }, + { + name: "fail not system type infobox", + args: args{ + extensionType: ExtensionTypeInfobox, + }, + err: ErrInvalidID, + }, + { + name: "fail nil id", + err: ErrInvalidID, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + build := func() *Extension { + t.Helper() + return NewExtension(). + ID(tc.args.id). + Visualizer(tc.args.visualizer). + Schema(tc.args.schema). + System(tc.args.system). + Type(tc.args.extensionType). + Description(tc.args.description). + Name(tc.args.ename). + Icon(tc.args.icon). + SingleOnly(tc.args.singleOnly). + WidgetLayout(tc.args.widgetLayout). + MustBuild() + } + + if tc.err != nil { + assert.PanicsWithValue(t, tc.err, func() { _ = build() }) + } else { + assert.Equal(t, tc.expected, build()) + } + }) + } +} diff --git a/server/pkg/plugin/extension_test.go b/server/pkg/plugin/extension_test.go new file mode 100644 index 000000000..1c446a4bd --- /dev/null +++ b/server/pkg/plugin/extension_test.go @@ -0,0 +1,67 @@ +package plugin + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/visualizer" + "github.com/stretchr/testify/assert" +) + +func TestExtension(t *testing.T) { + expected := struct { + ID ExtensionID + Type ExtensionType + Name i18n.String + Description i18n.String + Icon string + Schema PropertySchemaID + Visualizer visualizer.Visualizer + SingleOnly bool + WidgetLayout *WidgetLayout + }{ + ID: "xxx", + Type: ExtensionTypePrimitive, + Name: i18n.StringFrom("aaa"), + Description: i18n.StringFrom("ddd"), + Icon: "test", + Schema: MustPropertySchemaID("hoge~0.1.0/fff"), + Visualizer: "vvv", + SingleOnly: true, + WidgetLayout: NewWidgetLayout(false, false, true, false, nil).Ref(), + } + + actual := 
NewExtension(). + ID("xxx"). + Name(i18n.StringFrom("aaa")). + Description(i18n.StringFrom("ddd")). + Schema(MustPropertySchemaID("hoge~0.1.0/fff")). + Icon("test"). + WidgetLayout(NewWidgetLayout(false, false, true, false, nil).Ref()). + Visualizer("vvv"). + SingleOnly(true). + Type(ExtensionTypePrimitive). + MustBuild() + + assert.Equal(t, expected.Visualizer, actual.Visualizer()) + assert.Equal(t, expected.Type, actual.Type()) + assert.Equal(t, expected.Description, actual.Description()) + assert.Equal(t, expected.Name, actual.Name()) + assert.Equal(t, expected.Icon, actual.Icon()) + assert.Equal(t, expected.SingleOnly, actual.SingleOnly()) + assert.Equal(t, expected.WidgetLayout, actual.WidgetLayout()) + assert.Equal(t, expected.Schema, actual.Schema()) + assert.Equal(t, expected.ID, actual.ID()) +} + +func TestExtension_Rename(t *testing.T) { + p := NewExtension().ID("aaa").Name(i18n.StringFrom("x")).MustBuild() + p.Rename(i18n.StringFrom("z")) + assert.Equal(t, i18n.StringFrom("z"), p.Name()) +} + +func TestExtension_SetDescription(t *testing.T) { + p := NewExtension().ID("xx").MustBuild() + p.SetDescription(i18n.StringFrom("xxx")) + assert.Equal(t, i18n.StringFrom("xxx"), p.Description()) +} diff --git a/server/pkg/plugin/id.go b/server/pkg/plugin/id.go new file mode 100644 index 000000000..082f9f94f --- /dev/null +++ b/server/pkg/plugin/id.go @@ -0,0 +1,27 @@ +package plugin + +import "github.com/reearth/reearth-backend/pkg/id" + +type ID = id.PluginID +type ExtensionID = id.PluginExtensionID +type PropertySchemaID = id.PropertySchemaID +type SceneID = id.SceneID + +var NewID = id.NewPluginID +var NewSceneID = id.NewSceneID +var NewPropertySchemaID = id.NewPropertySchemaID + +var MustID = id.MustPluginID +var MustSceneID = id.MustSceneID +var MustPropertySchemaID = id.MustPropertySchemaID + +var IDFrom = id.PluginIDFrom +var SceneIDFrom = id.SceneIDFrom +var PropertySchemaIDFrom = id.PropertySchemaIDFrom + +var IDFromRef = id.PluginIDFromRef +var 
SceneIDFromRef = id.SceneIDFromRef +var PropertySchemaIDFromRef = id.PropertySchemaIDFromRef + +var OfficialPluginID = id.OfficialPluginID +var ErrInvalidID = id.ErrInvalidID diff --git a/server/pkg/plugin/list.go b/server/pkg/plugin/list.go new file mode 100644 index 000000000..6e28f86d8 --- /dev/null +++ b/server/pkg/plugin/list.go @@ -0,0 +1,50 @@ +package plugin + +import "sort" + +type List []*Plugin + +func (l List) Find(p ID) *Plugin { + for _, q := range l { + if q.ID().Equal(p) { + return q + } + } + return nil +} + +func (l List) Concat(m List) List { + return append(l, m...) +} + +func (l List) MapToIDs(ids []ID) List { + res := make(List, 0, len(ids)) + for _, id := range ids { + res = append(res, l.Find(id)) + } + return res +} + +func (l List) Map() Map { + m := make(Map, len(l)) + for _, p := range l { + m[p.ID()] = p + } + return m +} + +type Map map[ID]*Plugin + +func (m Map) List() List { + if m == nil { + return nil + } + res := make(List, 0, len(m)) + for _, p := range m { + res = append(res, p) + } + sort.SliceStable(res, func(i, j int) bool { + return res[i].ID().String() > res[j].ID().String() + }) + return res +} diff --git a/server/pkg/plugin/list_test.go b/server/pkg/plugin/list_test.go new file mode 100644 index 000000000..08bba230c --- /dev/null +++ b/server/pkg/plugin/list_test.go @@ -0,0 +1,45 @@ +package plugin + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestList_Find(t *testing.T) { + p1 := &Plugin{id: MustID("foo~1.0.0")} + p2 := &Plugin{id: MustID("bar~1.0.0")} + assert.Equal(t, p1, List{p1, p2}.Find(p1.ID())) + assert.Nil(t, List{p1, p2}.Find(MustID("hoge~1.0.0"))) + assert.Nil(t, List(nil).Find(p1.ID())) +} + +func TestList_Concat(t *testing.T) { + p1 := &Plugin{id: MustID("foo~1.0.0")} + p2 := &Plugin{id: MustID("bar~1.0.0")} + assert.Equal(t, List{p1, p2, p2}, List{p1, p2}.Concat(List{p2})) + assert.Equal(t, List{p1}, List(nil).Concat(List{p1})) + assert.Equal(t, List{p1}, List{p1}.Concat(nil)) 
+} + +func TestList_Map(t *testing.T) { + p1 := &Plugin{id: MustID("foo~1.0.0")} + p2 := &Plugin{id: MustID("bar~1.0.0")} + assert.Equal(t, Map{p1.ID(): p1, p2.ID(): p2}, List{p1, p2}.Map()) + assert.Equal(t, Map{}, List(nil).Map()) +} + +func TestList_MapToIDs(t *testing.T) { + p1 := &Plugin{id: MustID("foo~1.0.0")} + p2 := &Plugin{id: MustID("bar~1.0.0")} + assert.Equal(t, List{nil, p2}, List{p1, p2}.MapToIDs([]ID{MustID("hoge~1.0.0"), p2.ID()})) + assert.Equal(t, List{}, List{p1, p2}.MapToIDs(nil)) + assert.Equal(t, List{nil}, List(nil).MapToIDs([]ID{p1.ID()})) +} + +func TestMap_List(t *testing.T) { + p1 := &Plugin{id: MustID("foo~1.0.0")} + p2 := &Plugin{id: MustID("bar~1.0.0")} + assert.Equal(t, List{p1, p2}, Map{p1.ID(): p1, p2.ID(): p2}.List()) + assert.Nil(t, Map(nil).List()) +} diff --git a/server/pkg/plugin/loader.go b/server/pkg/plugin/loader.go new file mode 100644 index 000000000..3a789fbc0 --- /dev/null +++ b/server/pkg/plugin/loader.go @@ -0,0 +1,7 @@ +package plugin + +import ( + "context" +) + +type Loader func(context.Context, []ID) ([]*Plugin, error) diff --git a/server/pkg/plugin/manifest/convert.go b/server/pkg/plugin/manifest/convert.go new file mode 100644 index 000000000..9883e1f0e --- /dev/null +++ b/server/pkg/plugin/manifest/convert.go @@ -0,0 +1,392 @@ +package manifest + +import ( + "errors" + "fmt" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +var errInvalidManifestWith = rerror.With(ErrInvalidManifest) + +func (i *Root) manifest(sid *plugin.SceneID, tl *TranslatedRoot) (*Manifest, error) { + var pid plugin.ID + var err error + if i.System && string(i.ID) == plugin.OfficialPluginID.Name() { + pid = plugin.OfficialPluginID + } else { + pid, err = plugin.NewID(string(i.ID), i.Version, sid) + if err != nil { + return nil, 
errInvalidManifestWith(fmt.Errorf("invalid plugin id: %s %s %s", i.ID, i.Version, sid)) + } + } + + var pluginSchema *property.Schema + if i.Schema != nil { + var ts *TranslatedPropertySchema + if tl != nil { + ts = &tl.Schema + } + schema, err := i.Schema.schema(pid, "@", ts) + if err != nil { + return nil, errInvalidManifestWith(rerror.From("plugin property schema", err)) + } + pluginSchema = schema + } + + var extensions []*plugin.Extension + var extensionSchemas []*property.Schema + if l := len(i.Extensions); l > 0 { + extensions = make([]*plugin.Extension, 0, l) + extensionSchemas = make([]*property.Schema, 0, l) + } + + for _, e := range i.Extensions { + var te *TranslatedExtension + if tl != nil { + te = tl.Extensions[string(e.ID)] + } + + extension, extensionSchema, err2 := e.extension(pid, i.System, te) + if err2 != nil { + return nil, errInvalidManifestWith(rerror.From(fmt.Sprintf("ext (%s)", e.ID), err2)) + } + extensions = append(extensions, extension) + extensionSchemas = append(extensionSchemas, extensionSchema) + } + + var author, repository string + if i.Author != nil { + author = *i.Author + } + if i.Repository != nil { + repository = *i.Repository + } + + var name, desc i18n.String + if tl != nil { + name = tl.Name + desc = tl.Description + } + name = name.WithDefault(i.Name) + desc = desc.WithDefaultRef(i.Description) + + p, err := plugin.New(). + ID(pid). + Name(name). + Author(author). + Description(desc). + RepositoryURL(repository). + Schema(pluginSchema.IDRef()). + Extensions(extensions). 
+ Build() + if err != nil { + return nil, errInvalidManifestWith(rerror.From("build", err)) + } + + return &Manifest{ + Plugin: p, + Schema: pluginSchema, + ExtensionSchema: extensionSchemas, + }, nil +} + +func (i Extension) extension(pluginID plugin.ID, sys bool, te *TranslatedExtension) (*plugin.Extension, *property.Schema, error) { + eid := string(i.ID) + var ts *TranslatedPropertySchema + if te != nil { + ts = &te.PropertySchema + } + schema, err := i.Schema.schema(pluginID, eid, ts) + if err != nil { + return nil, nil, rerror.From("property schema", err) + } + + var viz visualizer.Visualizer + if i.Visualizer != nil { + switch *i.Visualizer { + case "cesium": + viz = visualizer.VisualizerCesium + case "": + return nil, nil, errors.New("visualizer missing") + default: + return nil, nil, fmt.Errorf("invalid visualizer: %s", *i.Visualizer) + } + } else if i.Type == "visualizer" { + return nil, nil, errors.New("visualizer missing") + } + + var typ plugin.ExtensionType + switch i.Type { + case "primitive": + typ = plugin.ExtensionTypePrimitive + case "widget": + typ = plugin.ExtensionTypeWidget + case "block": + typ = plugin.ExtensionTypeBlock + case "visualizer": + typ = plugin.ExtensionTypeVisualizer + case "infobox": + typ = plugin.ExtensionTypeInfobox + case "cluster": + typ = plugin.ExtensionTypeCluster + case "": + return nil, nil, errors.New("type missing") + default: + return nil, nil, fmt.Errorf("invalid type: %s", i.Type) + } + + var icon string + var singleOnly bool + if i.Icon != nil { + icon = *i.Icon + } + if i.SingleOnly != nil { + singleOnly = *i.SingleOnly + } + + var name, desc i18n.String + if te != nil { + name = te.Name + desc = te.Description + } + name = name.WithDefault(i.Name) + desc = desc.WithDefaultRef(i.Description) + + ext, err := plugin.NewExtension(). + ID(plugin.ExtensionID(eid)). + Name(name). + Description(desc). + Visualizer(viz). + Type(typ). + SingleOnly(singleOnly). + WidgetLayout(i.WidgetLayout.layout()). + Icon(icon). 
+ Schema(schema.ID()). + System(sys). + Build() + + if err != nil { + return nil, nil, rerror.From("build", err) + } + return ext, schema, nil +} + +func (l *WidgetLayout) layout() *plugin.WidgetLayout { + if l == nil { + return nil + } + + horizontallyExtendable := false + verticallyExtendable := false + extended := false + + if l.Extendable != nil && l.Extendable.Horizontally != nil && *l.Extendable.Horizontally { + horizontallyExtendable = true + } + if l.Extendable != nil && l.Extendable.Vertically != nil && *l.Extendable.Vertically { + verticallyExtendable = true + } + if l.Extended != nil && *l.Extended { + extended = true + } + + var dl *plugin.WidgetLocation + if l.DefaultLocation != nil { + dl = &plugin.WidgetLocation{ + Zone: plugin.WidgetZoneType(l.DefaultLocation.Zone), + Section: plugin.WidgetSectionType(l.DefaultLocation.Section), + Area: plugin.WidgetAreaType(l.DefaultLocation.Area), + } + } + + return plugin.NewWidgetLayout(horizontallyExtendable, verticallyExtendable, extended, l.Floating, dl).Ref() +} + +func (i *PropertySchema) schema(pluginID plugin.ID, idstr string, ts *TranslatedPropertySchema) (*property.Schema, error) { + psid, err := property.SchemaIDFrom(pluginID.String() + "/" + idstr) + if err != nil { + return nil, fmt.Errorf("invalid id: %s", pluginID.String()+"/"+idstr) + } + + if i == nil { + return property.NewSchema(). + ID(psid). + Build() + } + + // groups + groups := make([]*property.SchemaGroup, 0, len(i.Groups)) + for _, d := range i.Groups { + var tg *TranslatedPropertySchemaGroup + if ts != nil { + tg = (*ts)[string(d.ID)] + } + + item, err := d.schemaGroup(tg) + if err != nil { + return nil, rerror.From(fmt.Sprintf("item (%s)", d.ID), err) + } + groups = append(groups, item) + } + sgroups := property.NewSchemaGroupList(groups) + if sgroups == nil { + return nil, fmt.Errorf("invalid group; it is empty or it may contain some duplicated groups or fields") + } + + // schema + schema, err := property.NewSchema(). + ID(psid). 
+ Version(int(i.Version)). + Groups(sgroups). + LinkableFields(i.Linkable.linkable()). + Build() + if err != nil { + return nil, rerror.From("build", err) + } + return schema, nil +} + +func (p *PropertyLinkableFields) linkable() property.LinkableFields { + if p == nil { + return property.LinkableFields{} + } + return property.LinkableFields{ + LatLng: p.Latlng.pointer(), + URL: p.URL.pointer(), + } +} + +func (p *PropertyPointer) pointer() *property.SchemaFieldPointer { + if p == nil || p.FieldID == "" && p.SchemaGroupID == "" { + return nil + } + return &property.SchemaFieldPointer{ + SchemaGroup: property.SchemaGroupID(p.SchemaGroupID), + Field: property.FieldID(p.FieldID), + } +} + +func (i PropertySchemaGroup) schemaGroup(tg *TranslatedPropertySchemaGroup) (*property.SchemaGroup, error) { + var title i18n.String + if tg != nil { + title = tg.Title.Clone() + } + title = title.WithDefault(i.Title) + + var representativeField *property.FieldID + if i.RepresentativeField != nil { + representativeField = property.FieldID(*i.RepresentativeField).Ref() + } + + // fields + var fields []*property.SchemaField + if len(i.Fields) > 0 { + fields = make([]*property.SchemaField, 0, len(i.Fields)) + for _, d := range i.Fields { + var tf *TranslatedPropertySchemaField + if tg != nil { + tf = tg.Fields[string(d.ID)] + } + + field, err := d.schemaField(tf) + if err != nil { + return nil, rerror.From(fmt.Sprintf("field (%s)", d.ID), err) + } + fields = append(fields, field) + } + } + + return property.NewSchemaGroup(). + ID(property.SchemaGroupID(i.ID)). + IsList(i.List). + Fields(fields). + Title(title). + RepresentativeField(representativeField). + IsAvailableIf(i.AvailableIf.condition()). 
+ Build() +} + +func (o *PropertyCondition) condition() *property.Condition { + if o == nil { + return nil + } + return &property.Condition{ + Field: property.FieldID(o.Field), + Value: toValue(o.Value, o.Type), + } +} + +func (i PropertySchemaField) schemaField(tf *TranslatedPropertySchemaField) (*property.SchemaField, error) { + t := property.ValueType(i.Type) + if !t.Valid() { + return nil, fmt.Errorf("invalid value type: %s", i.Type) + } + + var title, desc i18n.String + if tf != nil { + title = tf.Title.Clone() + desc = tf.Description.Clone() + } + title = title.WithDefaultRef(i.Title) + desc = desc.WithDefaultRef(i.Description) + + var prefix, suffix string + if i.Prefix != nil { + prefix = *i.Prefix + } + if i.Suffix != nil { + suffix = *i.Suffix + } + + var choices []property.SchemaFieldChoice + if len(i.Choices) > 0 { + choices = make([]property.SchemaFieldChoice, 0, len(i.Choices)) + for _, c := range i.Choices { + if c.Key == "" { + continue + } + + var t i18n.String + if tf != nil { + t = tf.Choices[c.Key] + } + choices = append(choices, c.choice(t)) + } + } + + f, err := property.NewSchemaField(). + ID(property.FieldID(i.ID)). + Name(title). + Description(desc). + Type(t). + Prefix(prefix). + Suffix(suffix). + DefaultValue(toValue(i.DefaultValue, i.Type)). + MinRef(i.Min). + MaxRef(i.Max). + Choices(choices). + UIRef(property.SchemaFieldUIFromRef(i.UI)). + IsAvailableIf(i.AvailableIf.condition()). 
+ Build() + if err != nil { + return nil, rerror.From("build", err) + } + return f, err +} + +func (c Choice) choice(t i18n.String) property.SchemaFieldChoice { + return property.SchemaFieldChoice{ + Key: c.Key, + Title: t.WithDefault(c.Label), + Icon: c.Icon, + } +} + +func toValue(v interface{}, t Valuetype) *property.Value { + return property.ValueType(t).ValueFrom(v) +} diff --git a/server/pkg/plugin/manifest/convert_test.go b/server/pkg/plugin/manifest/convert_test.go new file mode 100644 index 000000000..8643b91af --- /dev/null +++ b/server/pkg/plugin/manifest/convert_test.go @@ -0,0 +1,1034 @@ +package manifest + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/stretchr/testify/assert" +) + +func TestToValue(t *testing.T) { + v := property.ValueTypeBool + var vv *property.Value = nil + assert.Equal(t, toValue(false, "bool"), v.ValueFrom(false)) + assert.Equal(t, toValue("xx", "xxx"), vv) +} + +func TestChoice(t *testing.T) { + tests := []struct { + name string + ch Choice + tc i18n.String + expected property.SchemaFieldChoice + }{ + { + name: "success", + ch: Choice{ + Icon: "aaa", + Key: "nnn", + Label: "vvv", + }, + expected: property.SchemaFieldChoice{ + Key: "nnn", + Title: i18n.StringFrom("vvv"), + Icon: "aaa", + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, tt.ch.choice(tt.tc)) + }) + } +} + +func TestManifest(t *testing.T) { + es := "" + cesium := "cesium" + a := "aaa" + d := "ddd" + r := "rrr" + + tests := []struct { + name string + root *Root + scene *plugin.SceneID + expected *Manifest + tl *TranslatedRoot + err string + }{ + { + name: "success official plugin", + root: &Root{ + Author: &a, + Name: "aaa", + ID: "reearth", + Description: &d, + Extensions: []Extension{{ + 
Description: nil, + ID: "cesium", + Name: "", + Type: "visualizer", + Visualizer: &cesium, + }}, + Repository: &r, + System: true, + Version: "1.1.1", + Schema: &PropertySchema{ + Groups: []PropertySchemaGroup{ + {ID: "default"}, + }, + }, + }, + tl: &TranslatedRoot{ + Name: i18n.String{"ja": "A"}, + Description: i18n.String{"ja": "B"}, + Extensions: map[string]*TranslatedExtension{"cesium": {Name: i18n.String{"ja": "ใ‚ปใ‚ธใ‚ฆใƒ "}}}, + Schema: TranslatedPropertySchema{"default": {Title: i18n.String{"ja": "ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ"}}}, + }, + expected: &Manifest{ + Plugin: plugin.New(). + ID(plugin.OfficialPluginID). + Name(i18n.String{"en": "aaa", "ja": "A"}). + Author(a). + RepositoryURL(r). + Description(i18n.String{"en": d, "ja": "B"}). + Schema(property.NewSchemaID(plugin.OfficialPluginID, "@").Ref()). + Extensions([]*plugin.Extension{ + plugin.NewExtension(). + ID("cesium"). + Name(i18n.String{"ja": "ใ‚ปใ‚ธใ‚ฆใƒ "}). + Visualizer("cesium"). + Type("visualizer"). + Schema(property.NewSchemaID(plugin.OfficialPluginID, "cesium")). + System(true). + MustBuild(), + }).MustBuild(), + ExtensionSchema: property.SchemaList{ + property.NewSchema(). + ID(property.NewSchemaID(plugin.OfficialPluginID, "cesium")). + MustBuild(), + }, + Schema: property.NewSchema(). + ID(property.NewSchemaID(plugin.OfficialPluginID, "@")). + Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ + property.NewSchemaGroup().ID("default").Title(i18n.String{"ja": "ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ"}).MustBuild(), + })). 
+ MustBuild(), + }, + }, + { + name: "success empty name", + root: &Root{ + Name: "reearth", + ID: "reearth", + System: true, + }, + expected: &Manifest{ + Plugin: plugin.New().ID(plugin.OfficialPluginID).Name(i18n.StringFrom("reearth")).MustBuild(), + }, + }, + { + name: "fail invalid manifest - extension", + root: &Root{ + Author: &a, + Name: "aaa", + ID: "reearth", + Description: &d, + Extensions: []Extension{{ + Description: nil, + ID: "cesium", + Name: "", + Schema: nil, + Type: "visualizer", + Visualizer: &es, + }}, + Repository: &r, + System: true, + Version: "1.1.1", + }, + expected: &Manifest{ + Plugin: plugin.New(). + ID(plugin.OfficialPluginID). + Name(i18n.StringFrom("aaa")). + Extensions([]*plugin.Extension{ + plugin.NewExtension(). + ID("cesium"). + Visualizer("cesium"). + Type("visualizer"). + System(true). + MustBuild(), + }). + MustBuild(), + ExtensionSchema: nil, + Schema: nil, + }, + err: "invalid manifest: ext (cesium): visualizer missing", + }, + { + name: "fail invalid manifest - id", + root: &Root{ + Name: "", + ID: "", + System: false, + }, + expected: &Manifest{ + Plugin: plugin.New(). + ID(plugin.OfficialPluginID). + Name(i18n.StringFrom("reearth")). 
+ MustBuild(), + }, + err: "invalid manifest: invalid plugin id: ", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + m, err := tt.root.manifest(tt.scene, tt.tl) + if tt.err == "" { + assert.Equal(t, tt.expected, m) + assert.NoError(t, err) + } else { + assert.Nil(t, m) + assert.EqualError(t, err, tt.err) + } + }) + } +} + +func TestExtension(t *testing.T) { + es := "" + cesium := "cesium" + d := "ddd" + i := "xx:/aa.bb" + tr := true + + tests := []struct { + name string + ext Extension + sys bool + tl *TranslatedExtension + pid plugin.ID + expectedPE *plugin.Extension + expectedPS *property.Schema + err string + }{ + { + name: "visualizer", + ext: Extension{ + Description: &d, + ID: "cesium", + Name: "Cesium", + Icon: &i, + Schema: &PropertySchema{ + Groups: []PropertySchemaGroup{ + {ID: "default"}, + }, + }, + Type: "visualizer", + Visualizer: &cesium, + }, + sys: true, + pid: plugin.OfficialPluginID, + tl: &TranslatedExtension{ + Name: i18n.String{"ja": "ใ‚ปใ‚ธใ‚ฆใƒ "}, + Description: i18n.String{"ja": "DDD"}, + PropertySchema: TranslatedPropertySchema{ + "default": {Title: i18n.String{"ja": "ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ"}}, + }, + }, + expectedPE: plugin.NewExtension(). + ID("cesium"). + Name(i18n.String{"en": "Cesium", "ja": "ใ‚ปใ‚ธใ‚ฆใƒ "}). + Visualizer("cesium"). + Type(plugin.ExtensionTypeVisualizer). + System(true). + Description(i18n.String{"en": "ddd", "ja": "DDD"}). + Schema(property.MustSchemaID("reearth/cesium")). + Icon(i). + MustBuild(), + expectedPS: property.NewSchema(). + ID(property.MustSchemaID("reearth/cesium")). + Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ + property.NewSchemaGroup().ID("default").Title(i18n.String{"ja": "ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ"}).MustBuild(), + })). 
+ MustBuild(), + }, + { + name: "primitive", + ext: Extension{ + Description: &d, + ID: "cesium", + Name: "Cesium", + Schema: nil, + Type: "primitive", + Visualizer: &cesium, + }, + sys: true, + pid: plugin.OfficialPluginID, + expectedPE: plugin.NewExtension(). + ID("cesium"). + Name(i18n.StringFrom("Cesium")). + Visualizer("cesium"). + Type(plugin.ExtensionTypePrimitive). + System(true). + Description(i18n.StringFrom("ddd")). + Schema(property.MustSchemaID("reearth/cesium")). + MustBuild(), + expectedPS: property.NewSchema(). + ID(property.MustSchemaID("reearth/cesium")). + MustBuild(), + }, + { + name: "widget", + ext: Extension{ + Description: &d, + ID: "cesium", + Name: "Cesium", + Schema: nil, + Type: "widget", + SingleOnly: &tr, + }, + sys: true, + pid: plugin.OfficialPluginID, + expectedPE: plugin.NewExtension(). + ID("cesium"). + Name(i18n.StringFrom("Cesium")). + Visualizer(""). + Type(plugin.ExtensionTypeWidget). + System(true). + Description(i18n.StringFrom("ddd")). + Schema(property.MustSchemaID("reearth/cesium")). + SingleOnly(true). + MustBuild(), + expectedPS: property.NewSchema(). + ID(property.MustSchemaID("reearth/cesium")). + MustBuild(), + }, + { + name: "block", + ext: Extension{ + Description: &d, + ID: "cesium", + Name: "Cesium", + Schema: nil, + Type: "block", + }, + sys: true, + pid: plugin.OfficialPluginID, + expectedPE: plugin.NewExtension(). + ID("cesium").Name(i18n.StringFrom("Cesium")). + Visualizer(""). + Type(plugin.ExtensionTypeBlock). + System(true). + Description(i18n.StringFrom("ddd")). + Schema(property.MustSchemaID("reearth/cesium")). + MustBuild(), + expectedPS: property.NewSchema(). + ID(property.MustSchemaID("reearth/cesium")). + MustBuild(), + }, + { + name: "infobox", + ext: Extension{ + Description: &d, + ID: "cesium", + Name: "Cesium", + Schema: nil, + Type: "infobox", + Visualizer: &cesium, + }, + sys: true, + pid: plugin.OfficialPluginID, + expectedPE: plugin.NewExtension(). + ID("cesium"). 
+ Name(i18n.StringFrom("Cesium")). + Visualizer("cesium"). + Type(plugin.ExtensionTypeInfobox). + System(true). + Description(i18n.StringFrom("ddd")). + Schema(property.MustSchemaID("reearth/cesium")). + MustBuild(), + expectedPS: property.NewSchema(). + ID(property.MustSchemaID("reearth/cesium")). + MustBuild(), + }, + { + name: "cluster", + ext: Extension{ + Description: &d, + ID: "cesium", + Name: "Cesium", + Schema: nil, + Type: "cluster", + Visualizer: &cesium, + }, + sys: true, + pid: plugin.OfficialPluginID, + expectedPE: plugin.NewExtension(). + ID("cesium"). + Name(i18n.StringFrom("Cesium")). + Visualizer("cesium"). + Type(plugin.ExtensionTypeCluster). + System(true). + Description(i18n.StringFrom("ddd")). + Schema(property.MustSchemaID("reearth/cesium")). + MustBuild(), + expectedPS: property.NewSchema(). + ID(property.MustSchemaID("reearth/cesium")). + MustBuild(), + }, + { + name: "empty visualizer", + ext: Extension{ + Description: &d, + ID: "cesium", + Name: "Cesium", + Schema: nil, + Type: "visualizer", + Visualizer: &es, + }, + sys: true, + pid: plugin.OfficialPluginID, + expectedPE: nil, + expectedPS: nil, + err: "visualizer missing", + }, + { + name: "nil visualizer", + ext: Extension{ + Description: &d, + ID: "cesium", + Name: "Cesium", + Schema: nil, + Type: "visualizer", + Visualizer: nil, + }, + sys: true, + pid: plugin.OfficialPluginID, + expectedPE: nil, + expectedPS: nil, + err: "visualizer missing", + }, + { + name: "empty type", + ext: Extension{ + Description: &d, + ID: "cesium", + Name: "Cesium", + Schema: nil, + Type: "", + Visualizer: &cesium, + }, + sys: true, + pid: plugin.OfficialPluginID, + expectedPE: nil, + expectedPS: nil, + err: "type missing", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + pe, ps, err := tt.ext.extension(tt.pid, tt.sys, tt.tl) + if tt.err == "" { + assert.Equal(t, tt.expectedPE, pe) + assert.Equal(t, tt.expectedPS, ps) + assert.Nil(t, err) + } else { 
+ assert.EqualError(t, err, tt.err) + assert.Nil(t, pe) + assert.Nil(t, ps) + } + }) + } +} + +func TestPointer(t *testing.T) { + sg := "aaa" + f := "xxx" + + tests := []struct { + name string + pp *PropertyPointer + expected *property.SchemaFieldPointer + }{ + { + name: "failed nil PropertyPointer", + pp: nil, + expected: nil, + }, + { + name: "failed empty FieldID and SchemaGroupID", + pp: &PropertyPointer{ + FieldID: "", + SchemaGroupID: "", + }, + expected: nil, + }, + { + name: "success", + pp: &PropertyPointer{ + FieldID: "xxx", + SchemaGroupID: "aaa", + }, + expected: &property.SchemaFieldPointer{ + SchemaGroup: property.SchemaGroupID(sg), + Field: property.FieldID(f), + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, tt.pp.pointer()) + }) + } +} + +func TestCondition(t *testing.T) { + v := toValue("xxx", "string") + + tests := []struct { + name string + con *PropertyCondition + expected *property.Condition + }{ + { + name: "failed nil condition", + con: nil, + expected: nil, + }, + { + name: "success", + con: &PropertyCondition{ + Field: "aaa", + Type: "string", + Value: "xxx", + }, + expected: &property.Condition{ + Field: "aaa", + Value: v, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, tt.con.condition()) + }) + } +} + +func TestLinkable(t *testing.T) { + l := "location" + d := "default" + u := "url" + + tests := []struct { + name string + p *PropertyLinkableFields + expected property.LinkableFields + }{ + { + name: "nil linkable fields", + p: nil, + expected: property.LinkableFields{}, + }, + { + name: "success linkable fields", + p: &PropertyLinkableFields{ + Latlng: &PropertyPointer{ + FieldID: "location", + SchemaGroupID: "default", + }, + URL: &PropertyPointer{ + FieldID: "url", + SchemaGroupID: "default", + }, + }, + expected: property.LinkableFields{ + LatLng: 
&property.SchemaFieldPointer{SchemaGroup: property.SchemaGroupID(d), Field: property.FieldID(l)}, + URL: &property.SchemaFieldPointer{SchemaGroup: property.SchemaGroupID(d), Field: property.FieldID(u)}, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, tt.p.linkable()) + }) + } +} + +func TestSchema(t *testing.T) { + str := "ddd" + + tests := []struct { + name, psid string + ps *PropertySchema + pid plugin.ID + tl *TranslatedPropertySchema + expected *property.Schema + err string + }{ + { + name: "fail invalid id", + psid: "~", + ps: &PropertySchema{ + Groups: nil, + Linkable: nil, + Version: 0, + }, + pid: plugin.MustID("aaa~1.1.1"), + expected: nil, + err: "invalid id: aaa~1.1.1/~", + }, + { + name: "success nil PropertySchema", + psid: "marker", + ps: nil, + pid: plugin.OfficialPluginID, + expected: property.NewSchema().ID(property.MustSchemaID("reearth/marker")).MustBuild(), + }, + { + name: "success", + psid: "marker", + ps: &PropertySchema{ + Groups: []PropertySchemaGroup{{ + AvailableIf: nil, + Description: &str, + Fields: []PropertySchemaField{{ + AvailableIf: nil, + Choices: nil, + DefaultValue: nil, + Description: nil, + ID: "location", + Max: nil, + Min: nil, + Title: nil, + Prefix: nil, + Suffix: nil, + Type: "latlng", + UI: nil, + }}, + ID: "default", + List: false, + Title: "marker", + }}, + Linkable: nil, + Version: 0, + }, + tl: &TranslatedPropertySchema{ + "default": {Title: i18n.String{"ja": "ใƒžใƒผใ‚ซใƒผ"}}, + }, + pid: plugin.OfficialPluginID, + expected: property. + NewSchema(). + ID(property.MustSchemaID("reearth/marker")). + Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ + property.NewSchemaGroup(). + ID("default"). + Title(i18n.String{"en": "marker", "ja": "ใƒžใƒผใ‚ซใƒผ"}). + Fields([]*property.SchemaField{ + property.NewSchemaField(). + ID("location"). + Type(property.ValueTypeLatLng). + MustBuild()}, + ). + MustBuild()}, + )). 
+ MustBuild(), + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res, err := tt.ps.schema(tt.pid, tt.psid, tt.tl) + if tt.err == "" { + assert.Equal(t, tt.expected, res) + assert.Nil(t, err) + } else { + assert.Nil(t, res) + assert.EqualError(t, err, tt.err) + } + }) + } +} + +func TestSchemaGroup(t *testing.T) { + str := "marker" + des := "ddd" + + tests := []struct { + name string + psg PropertySchemaGroup + tl *TranslatedPropertySchemaGroup + expected *property.SchemaGroup + err string + }{ + { + name: "success reearth/cesium", + psg: PropertySchemaGroup{ + AvailableIf: nil, + Description: &des, + Fields: []PropertySchemaField{{ + AvailableIf: nil, + Choices: nil, + DefaultValue: nil, + Description: nil, + ID: "location", + Max: nil, + Min: nil, + Title: nil, + Prefix: nil, + Suffix: nil, + Type: "latlng", + UI: nil, + }}, + ID: "default", + List: false, + Title: "marker", + }, + tl: &TranslatedPropertySchemaGroup{ + Title: i18n.String{"ja": "ใƒžใƒผใ‚ซใƒผ"}, + Description: i18n.String{"ja": "่ชฌๆ˜Ž"}, + Fields: map[string]*TranslatedPropertySchemaField{ + "location": {Title: i18n.String{"en": "x"}}, + }, + }, + expected: property.NewSchemaGroup(). + ID("default"). + Title(i18n.String{"en": str, "ja": "ใƒžใƒผใ‚ซใƒผ"}). + Fields([]*property.SchemaField{ + property.NewSchemaField(). + ID("location"). + Type(property.ValueTypeLatLng). + Name(i18n.String{"en": "x"}). 
+ MustBuild(), + }).MustBuild(), + }, + { + name: "fail invalid schema field", + psg: PropertySchemaGroup{ + AvailableIf: nil, + Description: &des, + Fields: []PropertySchemaField{{ + AvailableIf: nil, + Choices: nil, + DefaultValue: nil, + Description: nil, + ID: "location", + Max: nil, + Min: nil, + Title: nil, + Prefix: nil, + Suffix: nil, + Type: "xx", + UI: nil, + }}, + ID: "default", + List: false, + Title: "marker", + }, + expected: nil, + err: "field (location): invalid value type: xx", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res, err := tt.psg.schemaGroup(tt.tl) + if tt.err == "" { + assert.Equal(t, tt.expected, res) + assert.Nil(t, err) + } else { + assert.Nil(t, res) + assert.EqualError(t, err, tt.err) + } + }) + } +} + +func TestSchemaField(t *testing.T) { + str := "xx" + + tests := []struct { + name string + psg PropertySchemaField + tl *TranslatedPropertySchemaField + expected *property.SchemaField + err error + }{ + { + name: "success", + psg: PropertySchemaField{ + AvailableIf: nil, + Choices: nil, + DefaultValue: nil, + Description: nil, + ID: "aaa", + Max: nil, + Min: nil, + Title: &str, + Prefix: nil, + Suffix: nil, + Type: "string", + UI: nil, + }, + tl: &TranslatedPropertySchemaField{ + Title: i18n.String{"en": "TITLE", "ja": "ใ‚ฟใ‚คใƒˆใƒซ"}, + Description: i18n.String{"ja": "่ชฌๆ˜Ž"}, + Choices: map[string]i18n.String{"A": {"en": "a"}}, + }, + expected: property.NewSchemaField(). + ID("aaa"). + Name(i18n.String{"en": str, "ja": "ใ‚ฟใ‚คใƒˆใƒซ"}). + Description(i18n.String{"ja": "่ชฌๆ˜Ž"}). + Type(property.ValueTypeString). + MustBuild(), + err: nil, + }, + { + name: "success description not nil", + psg: PropertySchemaField{ + AvailableIf: nil, + Choices: nil, + DefaultValue: nil, + Description: &str, + ID: "aaa", + Max: nil, + Min: nil, + Title: nil, + Prefix: nil, + Suffix: nil, + Type: "string", + UI: nil, + }, + expected: property.NewSchemaField(). + ID("aaa"). 
+ Name(i18n.StringFrom("")). + Description(i18n.StringFrom("xx")). + Type(property.ValueTypeString). + MustBuild(), + err: nil, + }, + { + name: "success prefix not nil", + psg: PropertySchemaField{ + AvailableIf: nil, + Choices: nil, + DefaultValue: nil, + Description: nil, + ID: "aaa", + Max: nil, + Min: nil, + Title: nil, + Prefix: &str, + Suffix: nil, + Type: "string", + UI: nil, + }, + expected: property.NewSchemaField(). + ID("aaa"). + Prefix("xx"). + Name(i18n.StringFrom("")). + Description(i18n.StringFrom("")). + Type(property.ValueTypeString). + MustBuild(), + err: nil, + }, + { + name: "success suffix not nil", + psg: PropertySchemaField{ + AvailableIf: nil, + Choices: nil, + DefaultValue: nil, + Description: nil, + ID: "aaa", + Max: nil, + Min: nil, + Title: nil, + Prefix: nil, + Suffix: &str, + Type: "string", + UI: nil, + }, + expected: property.NewSchemaField(). + ID("aaa"). + Name(i18n.StringFrom("")). + Description(i18n.StringFrom("")). + Suffix("xx"). + Type(property.ValueTypeString). + MustBuild(), + err: nil, + }, + { + name: "success choices not empty", + psg: PropertySchemaField{ + AvailableIf: nil, + Choices: []Choice{ + { + Icon: "aaa", + Key: "nnn", + Label: "vvv", + }, + { + Key: "z", + }, + }, + DefaultValue: nil, + Description: nil, + ID: "aaa", + Max: nil, + Min: nil, + Title: nil, + Prefix: nil, + Suffix: nil, + Type: "string", + UI: nil, + }, + tl: &TranslatedPropertySchemaField{ + Choices: map[string]i18n.String{"nnn": {"ja": "a"}, "z": {"en": "Z"}}, + }, + expected: property.NewSchemaField(). + ID("aaa"). + Choices([]property.SchemaFieldChoice{ + { + Key: "nnn", + Title: i18n.String{"en": "vvv", "ja": "a"}, + Icon: "aaa", + }, + { + Key: "z", + Title: i18n.String{"en": "Z"}, + }, + }). + Type(property.ValueTypeString). + Name(i18n.StringFrom("")). + Description(i18n.StringFrom("")). 
+ MustBuild(), + err: nil, + }, + { + name: "success choices empty key", + psg: PropertySchemaField{ + AvailableIf: nil, + Choices: []Choice{ + { + Icon: "aaa", + Key: "", + Label: "vvv", + }, + }, + DefaultValue: nil, + Description: nil, + ID: "aaa", + Max: nil, + Min: nil, + Title: nil, + Prefix: nil, + Suffix: nil, + Type: "string", + UI: nil, + }, + expected: property.NewSchemaField(). + ID("aaa"). + Choices([]property.SchemaFieldChoice{}). + Type(property.ValueTypeString). + Name(i18n.StringFrom("")). + Description(i18n.StringFrom("")). + MustBuild(), + err: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res, err := tt.psg.schemaField(tt.tl) + if tt.err == nil { + assert.Equal(t, tt.expected, res) + assert.Nil(t, err) + } else { + assert.Nil(t, res) + assert.Equal(t, tt.err, rerror.Get(err).Err) + } + }) + } +} + +func TestLayout(t *testing.T) { + tr := true + + tests := []struct { + name string + widgetLayout WidgetLayout + expected *plugin.WidgetLayout + }{ + { + name: "convert manifest widget layout to scene widget layout", + widgetLayout: WidgetLayout{ + Extendable: &Extendable{ + Horizontally: &tr, + Vertically: nil, + }, + Extended: nil, + Floating: true, + DefaultLocation: &Location{ + Zone: "outer", + Section: "left", + Area: "top", + }, + }, + expected: plugin.NewWidgetLayout(true, false, false, true, &plugin.WidgetLocation{ + Zone: plugin.WidgetZoneOuter, + Section: plugin.WidgetSectionLeft, + Area: plugin.WidgetAreaTop, + }).Ref(), + }, + { + name: "nil default location", + widgetLayout: WidgetLayout{ + Extendable: &Extendable{ + Horizontally: nil, + Vertically: &tr, + }, + Extended: nil, + Floating: false, + DefaultLocation: nil, + }, + expected: plugin.NewWidgetLayout(false, true, false, false, nil).Ref(), + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.widgetLayout.layout() + assert.Equal(t, tt.expected, res) + }) + 
} +} diff --git a/server/pkg/plugin/manifest/diff.go b/server/pkg/plugin/manifest/diff.go new file mode 100644 index 000000000..4453a3d78 --- /dev/null +++ b/server/pkg/plugin/manifest/diff.go @@ -0,0 +1,104 @@ +package manifest + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Diff struct { + From plugin.ID + To plugin.ID + PropertySchemaDiff property.SchemaDiff + PropertySchemaDeleted bool + DeletedExtensions []DiffExtensionDeleted + UpdatedExtensions []DiffExtensionUpdated +} + +type DiffExtensionUpdated struct { + ExtensionID plugin.ExtensionID + OldType plugin.ExtensionType + NewType plugin.ExtensionType + PropertySchemaDiff property.SchemaDiff +} + +type DiffExtensionDeleted struct { + ExtensionID plugin.ExtensionID + PropertySchemaID property.SchemaID +} + +func DiffFrom(old, new Manifest) (d Diff) { + d.From = old.Plugin.ID() + d.To = new.Plugin.ID() + + oldsid, newsid := old.Plugin.Schema(), new.Plugin.Schema() + if oldsid != nil && newsid == nil { + d.PropertySchemaDiff.From = *oldsid + d.PropertySchemaDeleted = true + } else if oldsid != nil && newsid != nil { + d.PropertySchemaDiff = property.SchemaDiffFrom(old.PropertySchema(*oldsid), old.PropertySchema(*newsid)) + } + + for _, e := range old.Plugin.Extensions() { + ne := new.Plugin.Extension(e.ID()) + if ne == nil { + d.DeletedExtensions = append(d.DeletedExtensions, DiffExtensionDeleted{ + ExtensionID: e.ID(), + PropertySchemaID: e.Schema(), + }) + continue + } + + oldps, newps := old.PropertySchema(e.Schema()), new.PropertySchema(ne.Schema()) + diff := DiffExtensionUpdated{ + ExtensionID: e.ID(), + OldType: e.Type(), + NewType: ne.Type(), + PropertySchemaDiff: property.SchemaDiffFrom(oldps, newps), + } + + if diff.OldType != diff.NewType || !diff.PropertySchemaDiff.IsEmpty() { + d.UpdatedExtensions = append(d.UpdatedExtensions, diff) + } + } + + return +} + +func (d *Diff) 
IsEmpty() bool { + return d == nil || len(d.DeletedExtensions) == 0 && len(d.UpdatedExtensions) == 0 && d.PropertySchemaDiff.IsEmpty() && !d.PropertySchemaDeleted +} + +func (d Diff) DeletedPropertySchemas() []id.PropertySchemaID { + s := make([]id.PropertySchemaID, 0, len(d.DeletedExtensions)+1) + if d.PropertySchemaDeleted { + s = append(s, d.PropertySchemaDiff.From) + } + for _, e := range d.DeletedExtensions { + skip := false + for _, ss := range s { + if ss.Equal(e.PropertySchemaID) { + skip = true + break + } + } + if skip { + continue + } + s = append(s, e.PropertySchemaID) + } + return s +} + +func (d Diff) PropertySchmaDiffs() property.SchemaDiffList { + s := make(property.SchemaDiffList, 0, len(d.UpdatedExtensions)+1) + if !d.PropertySchemaDeleted && (!d.PropertySchemaDiff.IsEmpty() || d.PropertySchemaDiff.IsIDChanged()) { + s = append(s, d.PropertySchemaDiff) + } + for _, e := range d.UpdatedExtensions { + if !e.PropertySchemaDiff.IsEmpty() || e.PropertySchemaDiff.IsIDChanged() { + s = append(s, e.PropertySchemaDiff) + } + } + return s +} diff --git a/server/pkg/plugin/manifest/diff_test.go b/server/pkg/plugin/manifest/diff_test.go new file mode 100644 index 000000000..ed55ce1ee --- /dev/null +++ b/server/pkg/plugin/manifest/diff_test.go @@ -0,0 +1,248 @@ +package manifest + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +func TestDiffFrom(t *testing.T) { + oldp := plugin.MustID("aaaaaa~1.0.0") + newp := plugin.MustID("aaaaaa~1.1.0") + oldps := property.MustSchemaID("aaaaaa~1.0.0/@") + olde1ps := property.MustSchemaID("aaaaaa~1.0.0/a") + olde2ps := property.MustSchemaID("aaaaaa~1.0.0/b") + olde3ps := property.MustSchemaID("aaaaaa~1.0.0/c") + olde4ps := property.MustSchemaID("aaaaaa~1.0.0/d") + olde5ps := property.MustSchemaID("aaaaaa~1.0.0/e") + newe1ps := property.MustSchemaID("aaaaaa~1.1.0/a") + old := Manifest{ + Plugin: 
plugin.New().ID(oldp).Schema(&oldps).Extensions([]*plugin.Extension{ + plugin.NewExtension().ID("a").Schema(olde1ps).Type(plugin.ExtensionTypeBlock).MustBuild(), + plugin.NewExtension().ID("b").Schema(olde2ps).MustBuild(), // deleted + plugin.NewExtension().ID("c").Schema(olde3ps).Type(plugin.ExtensionTypeBlock).MustBuild(), + plugin.NewExtension().ID("d").Schema(olde4ps).Type(plugin.ExtensionTypeBlock).MustBuild(), + plugin.NewExtension().ID("e").Schema(olde5ps).Type(plugin.ExtensionTypeBlock).MustBuild(), + }).MustBuild(), + Schema: property.NewSchema().ID(oldps).MustBuild(), + ExtensionSchema: []*property.Schema{ + property.NewSchema().ID(olde1ps).MustBuild(), + property.NewSchema().ID(olde2ps).MustBuild(), + property.NewSchema().ID(olde3ps).MustBuild(), + property.NewSchema().ID(olde4ps).MustBuild(), + property.NewSchema().ID(olde5ps).Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ + property.NewSchemaGroup().ID("x").Fields([]*property.SchemaField{ + property.NewSchemaField().ID("y").Type(property.ValueTypeString).MustBuild(), + }).MustBuild(), // updated + })).MustBuild(), + }, + } + new := Manifest{ + Plugin: plugin.New().ID(newp).Extensions([]*plugin.Extension{ + plugin.NewExtension().ID("a").Schema(newe1ps).Type(plugin.ExtensionTypePrimitive).MustBuild(), // updated + plugin.NewExtension().ID("c").Schema(olde3ps).Type(plugin.ExtensionTypeBlock).MustBuild(), // same + plugin.NewExtension().ID("d").Schema(olde4ps).Type(plugin.ExtensionTypeBlock).MustBuild(), // property schema update + plugin.NewExtension().ID("e").Schema(olde5ps).Type(plugin.ExtensionTypeBlock).MustBuild(), // property schema update + }).MustBuild(), + ExtensionSchema: []*property.Schema{ + property.NewSchema().ID(newe1ps).MustBuild(), + property.NewSchema().ID(olde3ps).MustBuild(), + property.NewSchema().ID(olde4ps).Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ + property.NewSchemaGroup().ID("x").MustBuild(), // added + })).MustBuild(), + 
property.NewSchema().ID(olde5ps).Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ + property.NewSchemaGroup().ID("x").Fields([]*property.SchemaField{ + property.NewSchemaField().ID("y").Type(property.ValueTypeBool).MustBuild(), + }).MustBuild(), // updated + })).MustBuild(), + }, + } + + type args struct { + old Manifest + new Manifest + } + tests := []struct { + name string + args args + want Diff + }{ + { + name: "diff", + args: args{old: old, new: new}, + want: Diff{ + From: oldp, + To: newp, + PropertySchemaDiff: property.SchemaDiff{From: oldps}, + PropertySchemaDeleted: true, + DeletedExtensions: []DiffExtensionDeleted{{ExtensionID: "b", PropertySchemaID: olde2ps}}, + UpdatedExtensions: []DiffExtensionUpdated{ + { + ExtensionID: "a", + OldType: plugin.ExtensionTypeBlock, + NewType: plugin.ExtensionTypePrimitive, + PropertySchemaDiff: property.SchemaDiff{From: olde1ps, To: newe1ps}, + }, + { + ExtensionID: "e", + OldType: plugin.ExtensionTypeBlock, + NewType: plugin.ExtensionTypeBlock, + PropertySchemaDiff: property.SchemaDiff{ + From: olde5ps, + To: olde5ps, + TypeChanged: []property.SchemaDiffTypeChanged{ + {SchemaFieldPointer: property.SchemaFieldPointer{SchemaGroup: "x", Field: "y"}, NewType: property.ValueTypeBool}, + }, + }, + }, + }, + }, + }, + { + name: "same", + args: args{ + old: old, + new: old, + }, + want: Diff{ + From: oldp, + To: oldp, + PropertySchemaDiff: property.SchemaDiff{From: oldps, To: oldps}, + }, + }, + { + name: "nil", + args: args{}, + want: Diff{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, DiffFrom(tt.args.old, tt.args.new)) + }) + } +} + +func TestDiff_IsEmpty(t *testing.T) { + tests := []struct { + name string + target *Diff + want bool + }{ + { + name: "presemt", + target: &Diff{ + PropertySchemaDeleted: true, + }, + want: false, + }, + { + name: "empty", + target: &Diff{}, + want: true, + }, + { + name: "empty2", + target: &Diff{ + From: 
plugin.MustID("a~1.0.0"), + }, + want: true, + }, + { + name: "nil", + target: nil, + want: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.IsEmpty()) + }) + } +} + +func TestDiff_DeletedPropertySchemas(t *testing.T) { + ps1 := property.MustSchemaID("a~1.0.0/a") + ps2 := property.MustSchemaID("a~1.0.0/b") + tests := []struct { + name string + target Diff + want []property.SchemaID + }{ + { + name: "ok", + target: Diff{ + PropertySchemaDiff: property.SchemaDiff{ + From: ps1, + }, + PropertySchemaDeleted: true, + DeletedExtensions: []DiffExtensionDeleted{ + {PropertySchemaID: ps2}, + {PropertySchemaID: ps2}, + }, + }, + want: []property.SchemaID{ + ps1, + ps2, + }, + }, + { + name: "empty", + target: Diff{}, + want: []property.SchemaID{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.DeletedPropertySchemas()) + }) + } +} + +func TestDiff_PropertySchmaDiffs(t *testing.T) { + ps1 := property.MustSchemaID("a~1.0.0/a") + ps2 := property.MustSchemaID("a~1.0.0/b") + tests := []struct { + name string + target Diff + want property.SchemaDiffList + }{ + { + name: "ok", + target: Diff{ + PropertySchemaDiff: property.SchemaDiff{ + From: ps1, + }, + UpdatedExtensions: []DiffExtensionUpdated{ + {PropertySchemaDiff: property.SchemaDiff{ + From: ps2, + }}, + }, + }, + want: property.SchemaDiffList{ + { + From: ps1, + }, + { + From: ps2, + }, + }, + }, + { + name: "empty", + target: Diff{}, + want: property.SchemaDiffList{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.PropertySchmaDiffs()) + }) + } +} diff --git a/server/pkg/plugin/manifest/manifest.go b/server/pkg/plugin/manifest/manifest.go new file mode 100644 index 000000000..010fc1078 --- /dev/null +++ b/server/pkg/plugin/manifest/manifest.go @@ -0,0 +1,30 @@ +package manifest + +import ( + 
"github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Manifest struct { + Plugin *plugin.Plugin + ExtensionSchema property.SchemaList + Schema *property.Schema +} + +func (m Manifest) PropertySchemas() property.SchemaList { + sl := append(property.SchemaList{}, m.ExtensionSchema...) + if m.Schema != nil { + sl = append(sl, m.Schema) + } + return sl +} + +func (m Manifest) PropertySchema(psid property.SchemaID) *property.Schema { + if psid.IsNil() { + return nil + } + if m.Schema != nil && psid.Equal(m.Schema.ID()) { + return m.Schema + } + return m.ExtensionSchema.Find(psid) +} diff --git a/server/pkg/plugin/manifest/manifest_test.go b/server/pkg/plugin/manifest/manifest_test.go new file mode 100644 index 000000000..070d905cc --- /dev/null +++ b/server/pkg/plugin/manifest/manifest_test.go @@ -0,0 +1,104 @@ +package manifest + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +func TestManifest_PropertySchemas(t *testing.T) { + s1 := property.NewSchema().ID(property.MustSchemaID("xx~1.0.0/aa")).MustBuild() + s2 := property.NewSchema().ID(property.MustSchemaID("xx~1.0.0/bb")).MustBuild() + + tests := []struct { + name string + target Manifest + want property.SchemaList + }{ + { + name: "schema and extensions", + target: Manifest{ + Schema: s1, + ExtensionSchema: property.SchemaList{s2}, + }, + want: property.SchemaList{s2, s1}, + }, + { + name: "schema only", + target: Manifest{ + Schema: s1, + }, + want: property.SchemaList{s1}, + }, + { + name: "extensions only", + target: Manifest{ + ExtensionSchema: property.SchemaList{s2}, + }, + want: property.SchemaList{s2}, + }, + { + name: "empty", + target: Manifest{}, + want: property.SchemaList{}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.PropertySchemas()) + }) + } +} + +func TestManifest_PropertySchema(t 
*testing.T) { + s1 := property.NewSchema().ID(property.MustSchemaID("xx~1.0.0/aa")).MustBuild() + s2 := property.NewSchema().ID(property.MustSchemaID("xx~1.0.0/bb")).MustBuild() + m := Manifest{ + Schema: s1, + ExtensionSchema: property.SchemaList{s2}, + } + + type args struct { + psid property.SchemaID + } + tests := []struct { + name string + target Manifest + args args + want *property.Schema + }{ + { + name: "schema", + target: m, + args: args{psid: s1.ID()}, + want: s1, + }, + { + name: "extension", + target: m, + args: args{psid: s2.ID()}, + want: s2, + }, + { + name: "empty", + target: Manifest{}, + args: args{psid: s2.ID()}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + res := tt.target.PropertySchema(tt.args.psid) + if tt.want == nil { + assert.Nil(t, res) + } else { + assert.Same(t, tt.want, res) + } + }) + } +} diff --git a/server/pkg/plugin/manifest/parser.go b/server/pkg/plugin/manifest/parser.go new file mode 100644 index 000000000..292d4b7ca --- /dev/null +++ b/server/pkg/plugin/manifest/parser.go @@ -0,0 +1,58 @@ +package manifest + +//go:generate go run github.com/idubinskiy/schematyper -o schema_gen.go --package manifest ../../../schemas/plugin_manifest.json + +import ( + "errors" + "io" + + "github.com/goccy/go-yaml" + "github.com/reearth/reearth-backend/pkg/plugin" +) + +var ( + ErrInvalidManifest error = errors.New("invalid manifest") + ErrFailedToParseManifest error = errors.New("failed to parse plugin manifest") + ErrSystemManifest = errors.New("cannot build system manifest") +) + +func Parse(source io.Reader, scene *plugin.SceneID, tl *TranslatedRoot) (*Manifest, error) { + root := Root{} + if err := yaml.NewDecoder(source).Decode(&root); err != nil { + return nil, ErrFailedToParseManifest + // return nil, fmt.Errorf("failed to parse plugin manifest: %w", err) + } + + manifest, err := root.manifest(scene, tl) + if err != nil { + return nil, err + } + if manifest.Plugin.ID().System() { 
+ return nil, ErrSystemManifest + } + + return manifest, nil +} + +func ParseSystemFromBytes(source []byte, scene *plugin.SceneID, tl *TranslatedRoot) (*Manifest, error) { + root := Root{} + if err := yaml.Unmarshal(source, &root); err != nil { + return nil, ErrFailedToParseManifest + // return nil, fmt.Errorf("failed to parse plugin manifest: %w", err) + } + + manifest, err := root.manifest(scene, tl) + if err != nil { + return nil, err + } + + return manifest, nil +} + +func MustParseSystemFromBytes(source []byte, scene *plugin.SceneID, tl *TranslatedRoot) *Manifest { + m, err := ParseSystemFromBytes(source, scene, tl) + if err != nil { + panic(err) + } + return m +} diff --git a/server/pkg/plugin/manifest/parser_test.go b/server/pkg/plugin/manifest/parser_test.go new file mode 100644 index 000000000..30fbde104 --- /dev/null +++ b/server/pkg/plugin/manifest/parser_test.go @@ -0,0 +1,194 @@ +package manifest + +import ( + _ "embed" + "strings" + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/visualizer" + "github.com/stretchr/testify/assert" +) + +//go:embed testdata/minimum.yml +var minimum string +var minimumExpected = &Manifest{ + Plugin: plugin.New().ID(plugin.MustID("aaa~1.1.1")).MustBuild(), +} + +//go:embed testdata/test.yml +var normal string +var normalExpected = &Manifest{ + Plugin: plugin.New().ID(plugin.MustID("aaa~1.1.1")).Name(i18n.StringFrom("bbb")).Extensions([]*plugin.Extension{ + plugin.NewExtension().ID(plugin.ExtensionID("hoge")). + Visualizer(visualizer.VisualizerCesium). + Type(plugin.ExtensionTypePrimitive). + WidgetLayout(nil). + Schema(property.MustSchemaID("aaa~1.1.1/hoge")). 
+ MustBuild(), + }).MustBuild(), + ExtensionSchema: []*property.Schema{ + property.NewSchema().ID(property.MustSchemaID("aaa~1.1.1/hoge")).Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ + property.NewSchemaGroup().ID(property.SchemaGroupID("default")). + RepresentativeField(property.FieldID("a").Ref()). + Fields([]*property.SchemaField{ + property.NewSchemaField().ID(property.FieldID("a")). + Type(property.ValueTypeBool). + DefaultValue(property.ValueTypeBool.ValueFrom(true)). + IsAvailableIf(&property.Condition{ + Field: property.FieldID("b"), + Value: property.ValueTypeNumber.ValueFrom(1), + }). + MustBuild(), + property.NewSchemaField().ID(property.FieldID("b")). + Type(property.ValueTypeNumber). + MustBuild(), + }).MustBuild(), + })).MustBuild(), + }, +} + +func TestParse(t *testing.T) { + tests := []struct { + name string + input string + expected *Manifest + err error + }{ + { + name: "success create simple manifest", + input: minimum, + expected: minimumExpected, + err: nil, + }, + { + name: "success create manifest", + input: normal, + expected: normalExpected, + err: nil, + }, + { + name: "fail not valid JSON", + input: "", + expected: nil, + err: ErrFailedToParseManifest, + }, + { + name: "fail system manifest", + input: `{ + "system": true, + "id": "reearth", + "title": "bbb", + "version": "1.1.1" + }`, + expected: nil, + err: ErrSystemManifest, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + m, err := Parse(strings.NewReader(tc.input), nil, nil) + if tc.err == nil { + if !assert.NoError(t, err) { + return + } + assert.Equal(t, tc.expected, m) + return + } + assert.ErrorIs(t, tc.err, err) + }) + } + +} + +func TestParseSystemFromBytes(t *testing.T) { + tests := []struct { + name, input string + expected *Manifest + err error + }{ + { + name: "success create simple manifest", + input: minimum, + expected: minimumExpected, + err: nil, + }, + { + name: "success create manifest", + input: 
normal, + expected: normalExpected, + err: nil, + }, + { + name: "fail not valid YAML", + input: "--", + expected: nil, + err: ErrFailedToParseManifest, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + m, err := ParseSystemFromBytes([]byte(tc.input), nil, nil) + if tc.err == nil { + if !assert.NoError(t, err) { + return + } + assert.Equal(t, tc.expected, m) + return + } + assert.ErrorIs(t, tc.err, err) + }) + } +} + +func TestMustParseSystemFromBytes(t *testing.T) { + tests := []struct { + name, input string + expected *Manifest + fails bool + }{ + { + name: "success create simple manifest", + input: minimum, + expected: minimumExpected, + fails: false, + }, + { + name: "success create manifest", + input: normal, + expected: normalExpected, + fails: false, + }, + { + name: "fail not valid JSON", + input: "--", + expected: nil, + fails: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + if tc.fails { + assert.Panics(t, func() { + _ = MustParseSystemFromBytes([]byte(tc.input), nil, nil) + }) + return + } + + m := MustParseSystemFromBytes([]byte(tc.input), nil, nil) + assert.Equal(t, m, tc.expected) + }) + } +} diff --git a/server/pkg/plugin/manifest/parser_translation.go b/server/pkg/plugin/manifest/parser_translation.go new file mode 100644 index 000000000..0fec6c142 --- /dev/null +++ b/server/pkg/plugin/manifest/parser_translation.go @@ -0,0 +1,41 @@ +package manifest + +// Generating types with schema typer for translation schema is disabled because some fields are wrongly typed. 
+// DISABLED go:generate go run github.com/idubinskiy/schematyper -o schema_translation_gen.go --package manifest --prefix Translation ../../../schemas/plugin_manifest_translation.json + +import ( + "errors" + "io" + + "github.com/goccy/go-yaml" +) + +var ( + ErrInvalidManifestTranslation error = errors.New("invalid manifest translation") + ErrFailedToParseManifestTranslation error = errors.New("failed to parse plugin manifest translation") +) + +func ParseTranslation(source io.Reader) (TranslationRoot, error) { + root := TranslationRoot{} + if err := yaml.NewDecoder(source).Decode(&root); err != nil { + return root, ErrFailedToParseManifestTranslation + } + + return root, nil +} + +func ParseTranslationFromBytes(source []byte) (TranslationRoot, error) { + tr := TranslationRoot{} + if err := yaml.Unmarshal(source, &tr); err != nil { + return tr, ErrFailedToParseManifestTranslation + } + return tr, nil +} + +func MustParseTranslationFromBytes(source []byte) TranslationRoot { + m, err := ParseTranslationFromBytes(source) + if err != nil { + panic(err) + } + return m +} diff --git a/server/pkg/plugin/manifest/parser_translation_test.go b/server/pkg/plugin/manifest/parser_translation_test.go new file mode 100644 index 000000000..db565f2e2 --- /dev/null +++ b/server/pkg/plugin/manifest/parser_translation_test.go @@ -0,0 +1,149 @@ +package manifest + +import ( + _ "embed" + "strings" + "testing" + + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +//go:embed testdata/translation.yml +var translatedManifest string +var expected = TranslationRoot{ + Description: lo.ToPtr("test plugin desc"), + Extensions: map[string]TranslationExtension{ + "test_ext": { + Name: lo.ToPtr("test ext name"), + PropertySchema: TranslationPropertySchema{ + "test_ps": TranslationPropertySchemaGroup{ + Description: lo.ToPtr("test ps desc"), + Fields: map[string]TranslationPropertySchemaField{ + "test_field": { + Choices: map[string]string{ + "test_key": "test choice value"}, + 
Description: lo.ToPtr("test field desc"), + Title: lo.ToPtr("test field name"), + Prefix: lo.ToPtr("P"), + Suffix: lo.ToPtr("S"), + }, + }, + Title: lo.ToPtr("test ps title"), + }, + }, + }, + }, + Name: lo.ToPtr("test plugin name"), + Schema: nil, +} + +func TestParseTranslation(t *testing.T) { + tests := []struct { + name string + input string + expected TranslationRoot + err error + }{ + { + name: "success create translation", + input: translatedManifest, + expected: expected, + err: nil, + }, + { + name: "fail not valid JSON", + input: "", + expected: TranslationRoot{}, + err: ErrFailedToParseManifestTranslation, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + r := strings.NewReader(tc.input) + res, err := ParseTranslation(r) + if tc.err != nil { + assert.ErrorIs(t, err, tc.err) + return + } + assert.Equal(t, tc.expected, res) + }) + } +} + +func TestParseTranslationFromBytes(t *testing.T) { + tests := []struct { + name string + input string + expected TranslationRoot + err error + }{ + { + name: "success create translation", + input: translatedManifest, + expected: expected, + err: nil, + }, + { + name: "fail not valid YAML", + input: "--", + expected: TranslationRoot{}, + err: ErrFailedToParseManifestTranslation, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := ParseTranslationFromBytes([]byte(tc.input)) + if tc.err != nil { + assert.ErrorIs(t, err, tc.err) + return + } + assert.Equal(t, tc.expected, res) + }) + } +} + +func TestMustParseTransSystemFromBytes(t *testing.T) { + tests := []struct { + name string + input string + expected TranslationRoot + fails bool + }{ + { + name: "success create translation", + input: translatedManifest, + expected: expected, + fails: false, + }, + { + name: "fail not valid YAML", + input: "--", + expected: TranslationRoot{}, + fails: true, + }, + } + + for _, tc := range tests { + tc := tc + 
t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + if tc.fails { + assert.Panics(t, func() { + _ = MustParseTranslationFromBytes([]byte(tc.input)) + }) + return + } + + res := MustParseTranslationFromBytes([]byte(tc.input)) + assert.Equal(t, tc.expected, res) + }) + } +} diff --git a/server/pkg/plugin/manifest/schema_gen.go b/server/pkg/plugin/manifest/schema_gen.go new file mode 100644 index 000000000..ca2434233 --- /dev/null +++ b/server/pkg/plugin/manifest/schema_gen.go @@ -0,0 +1,105 @@ +package manifest + +// generated by "/var/folders/lz/nhqy382n28g31wb4f_40gbmc0000gp/T/go-build773179862/b001/exe/schematyper -o schema_gen.go --package manifest ../../../schemas/plugin_manifest.json" -- DO NOT EDIT + +type Choice struct { + Icon string `json:"icon,omitempty"` + Key string `json:"key"` + Label string `json:"label,omitempty"` +} + +type Extendable struct { + Horizontally *bool `json:"horizontally,omitempty"` + Vertically *bool `json:"vertically,omitempty"` +} + +type Extension struct { + Description *string `json:"description,omitempty"` + ID ID `json:"id"` + Icon *string `json:"icon,omitempty"` + Name string `json:"name"` + Schema *PropertySchema `json:"schema,omitempty"` + SingleOnly *bool `json:"singleOnly,omitempty"` + Type string `json:"type"` + Visualizer *string `json:"visualizer,omitempty"` + WidgetLayout *WidgetLayout `json:"widgetLayout,omitempty"` +} + +type ID string + +type Id string + +type Location struct { + Area string `json:"area,omitempty"` + Section string `json:"section,omitempty"` + Zone string `json:"zone,omitempty"` +} + +type PropertyCondition struct { + Field string `json:"field"` + Type Valuetype `json:"type"` + Value interface{} `json:"value"` +} + +type PropertyLinkableFields struct { + Latlng *PropertyPointer `json:"latlng,omitempty"` + URL *PropertyPointer `json:"url,omitempty"` +} + +type PropertyPointer struct { + FieldID string `json:"fieldId"` + SchemaGroupID string `json:"schemaGroupId"` +} + +type PropertySchema struct { + 
Groups []PropertySchemaGroup `json:"groups,omitempty"` + Linkable *PropertyLinkableFields `json:"linkable,omitempty"` + Version float64 `json:"version,omitempty"` +} + +type PropertySchemaField struct { + AvailableIf *PropertyCondition `json:"availableIf,omitempty"` + Choices []Choice `json:"choices,omitempty"` + DefaultValue interface{} `json:"defaultValue,omitempty"` + Description *string `json:"description,omitempty"` + ID ID `json:"id"` + Max *float64 `json:"max,omitempty"` + Min *float64 `json:"min,omitempty"` + Prefix *string `json:"prefix,omitempty"` + Suffix *string `json:"suffix,omitempty"` + Title *string `json:"title"` + Type Valuetype `json:"type"` + UI *string `json:"ui,omitempty"` +} + +type PropertySchemaGroup struct { + AvailableIf *PropertyCondition `json:"availableIf,omitempty"` + Description *string `json:"description,omitempty"` + Fields []PropertySchemaField `json:"fields,omitempty"` + ID ID `json:"id"` + List bool `json:"list,omitempty"` + RepresentativeField *Id `json:"representativeField,omitempty"` + Title string `json:"title"` +} + +type Root struct { + Author *string `json:"author,omitempty"` + Description *string `json:"description,omitempty"` + Extensions []Extension `json:"extensions,omitempty"` + ID ID `json:"id"` + Main *string `json:"main,omitempty"` + Name string `json:"name"` + Repository *string `json:"repository,omitempty"` + Schema *PropertySchema `json:"schema,omitempty"` + System bool `json:"system,omitempty"` + Version string `json:"version,omitempty"` +} + +type Valuetype string + +type WidgetLayout struct { + DefaultLocation *Location `json:"defaultLocation,omitempty"` + Extendable *Extendable `json:"extendable,omitempty"` + Extended *bool `json:"extended,omitempty"` + Floating bool `json:"floating,omitempty"` +} diff --git a/server/pkg/plugin/manifest/schema_translation.go b/server/pkg/plugin/manifest/schema_translation.go new file mode 100644 index 000000000..5ece56ac1 --- /dev/null +++ 
b/server/pkg/plugin/manifest/schema_translation.go @@ -0,0 +1,275 @@ +package manifest + +import "github.com/reearth/reearth-backend/pkg/i18n" + +type TranslationExtension struct { + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + PropertySchema TranslationPropertySchema `json:"propertySchema,omitempty"` +} + +type TranslationPropertySchema map[string]TranslationPropertySchemaGroup + +type TranslationPropertySchemaField struct { + Choices map[string]string `json:"choices,omitempty"` + Title *string `json:"title,omitempty"` + Description *string `json:"description,omitempty"` + Prefix *string `json:"prefix,omitempty"` + Suffix *string `json:"suffix,omitempty"` +} + +type TranslationPropertySchemaGroup struct { + Description *string `json:"description,omitempty"` + Fields map[string]TranslationPropertySchemaField `json:"fields,omitempty"` + Title *string `json:"title,omitempty"` +} + +type TranslationRoot struct { + Description *string `json:"description,omitempty"` + Extensions map[string]TranslationExtension `json:"extensions,omitempty"` + Name *string `json:"name,omitempty"` + Schema TranslationPropertySchema `json:"schema,omitempty"` +} + +type TranslationMap map[string]TranslationRoot + +type TranslatedExtension struct { + Description i18n.String + Name i18n.String + PropertySchema TranslatedPropertySchema +} + +type TranslatedPropertySchema map[string]*TranslatedPropertySchemaGroup + +type TranslatedPropertySchemaField struct { + Choices map[string]i18n.String + Description i18n.String + Title i18n.String + Prefix i18n.String + Suffix i18n.String +} + +type TranslatedPropertySchemaGroup struct { + Description i18n.String + Fields map[string]*TranslatedPropertySchemaField + Title i18n.String +} + +type TranslatedRoot struct { + Description i18n.String + Extensions map[string]*TranslatedExtension + Name i18n.String + Schema TranslatedPropertySchema +} + +func (tm TranslationMap) Translated() (res TranslatedRoot) { + if 
len(tm) == 0 { + return TranslatedRoot{} + } + + res.Name = tm.name() + res.Description = tm.description() + res.Schema.setPropertySchema(tm.propertySchemas("")) + + for l, t := range tm { + for eid, e := range t.Extensions { + te := res.getOrCreateExtension(eid) + + if e.Name != nil { + if te.Name == nil { + te.Name = i18n.String{} + } + te.Name[l] = *e.Name + } + + if e.Description != nil { + if te.Description == nil { + te.Description = i18n.String{} + } + te.Description[l] = *e.Description + } + + if len(e.PropertySchema) > 0 { + te.PropertySchema.setPropertySchema(tm.propertySchemas(eid)) + } + } + } + + return res +} + +func (tm TranslationMap) TranslatedRef() *TranslatedRoot { + if len(tm) == 0 { + return nil + } + + t := tm.Translated() + return &t +} + +func (t TranslationRoot) propertySchema(eid string) (res TranslationPropertySchema) { + if eid == "" { + return t.Schema + } + for eid2, e := range t.Extensions { + if eid == eid2 { + return e.PropertySchema + } + } + return +} + +func (tm TranslationMap) name() i18n.String { + name := i18n.String{} + for l, t := range tm { + if t.Name == nil { + continue + } + name[l] = *t.Name + } + if len(name) == 0 { + return nil + } + return name +} + +func (tm TranslationMap) description() i18n.String { + desc := i18n.String{} + for l, t := range tm { + if t.Description == nil { + continue + } + desc[l] = *t.Description + } + if len(desc) == 0 { + return nil + } + return desc +} + +func (tm TranslationMap) propertySchemas(eid string) map[string]TranslationPropertySchema { + if len(tm) == 0 { + return nil + } + + res := make(map[string]TranslationPropertySchema) + for l, tl := range tm { + s := tl.propertySchema(eid) + res[l] = s + } + return res +} + +func (t *TranslatedRoot) getOrCreateExtension(eid string) *TranslatedExtension { + if eid == "" { + return nil + } + if t.Extensions == nil { + t.Extensions = map[string]*TranslatedExtension{} + } + if e, ok := t.Extensions[eid]; ok { + return e + } + g := 
&TranslatedExtension{} + t.Extensions[eid] = g + return g +} + +func (t *TranslatedPropertySchema) getOrCreateGroup(gid string) *TranslatedPropertySchemaGroup { + if gid == "" { + return nil + } + if t == nil || *t == nil { + *t = TranslatedPropertySchema{} + } + if g := (*t)[gid]; g != nil { + return g + } + g := &TranslatedPropertySchemaGroup{} + (*t)[gid] = g + return g +} + +func (t *TranslatedPropertySchemaGroup) getOrCreateField(fid string) *TranslatedPropertySchemaField { + if fid == "" { + return nil + } + if t.Fields == nil { + t.Fields = map[string]*TranslatedPropertySchemaField{} + } + if f := t.Fields[fid]; f != nil { + return f + } + f := &TranslatedPropertySchemaField{} + t.Fields[fid] = f + return f +} + +func (t *TranslatedPropertySchema) setPropertySchema(schemas map[string]TranslationPropertySchema) { + for l, tl := range schemas { + for gid, g := range tl { + if t == nil || *t == nil { + *t = TranslatedPropertySchema{} + } + + tg := t.getOrCreateGroup(gid) + + if g.Title != nil { + if tg.Title == nil { + tg.Title = i18n.String{} + } + tg.Title[l] = *g.Title + } + + if g.Description != nil { + if tg.Description == nil { + tg.Description = i18n.String{} + } + tg.Description[l] = *g.Description + } + + for fid, f := range g.Fields { + tf := tg.getOrCreateField(fid) + if f.Title != nil { + if tf.Title == nil { + tf.Title = i18n.String{} + } + tf.Title[l] = *f.Title + } + + if f.Description != nil { + if tf.Description == nil { + tf.Description = i18n.String{} + } + tf.Description[l] = *f.Description + } + + if f.Prefix != nil { + if tf.Prefix == nil { + tf.Prefix = i18n.String{} + } + tf.Prefix[l] = *f.Prefix + } + + if f.Suffix != nil { + if tf.Suffix == nil { + tf.Suffix = i18n.String{} + } + tf.Suffix[l] = *f.Suffix + } + + if len(f.Choices) > 0 { + if tf.Choices == nil { + tf.Choices = map[string]i18n.String{} + } + for cid, c := range f.Choices { + if tf.Choices[cid] == nil { + tf.Choices[cid] = i18n.String{} + } + tf.Choices[cid][l] = c + } + } 
+ } + } + } +} diff --git a/server/pkg/plugin/manifest/schema_translation_test.go b/server/pkg/plugin/manifest/schema_translation_test.go new file mode 100644 index 000000000..b1934cc3d --- /dev/null +++ b/server/pkg/plugin/manifest/schema_translation_test.go @@ -0,0 +1,197 @@ +package manifest + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestTranslationMap_Translated(t *testing.T) { + m := TranslationMap{ + "en": TranslationRoot{ + Name: lo.ToPtr("Name"), + Description: lo.ToPtr("desc"), + Extensions: map[string]TranslationExtension{ + "a": { + Name: lo.ToPtr("ext"), + PropertySchema: TranslationPropertySchema{ + "default": { + Fields: map[string]TranslationPropertySchemaField{ + "foo": { + Title: lo.ToPtr("foo"), + Choices: map[string]string{"A": "AAA", "B": "BBB"}, + Prefix: lo.ToPtr("P"), + Suffix: lo.ToPtr("S"), + }, + "hoge": {Title: lo.ToPtr("hoge")}, + }, + }, + }, + }, + }, + Schema: TranslationPropertySchema{ + "another": { + Fields: map[string]TranslationPropertySchemaField{ + "foo": {Choices: map[string]string{"A": "AAA"}}, + }, + }, + }, + }, + "ja": TranslationRoot{ + Name: lo.ToPtr("ๅๅ‰"), + Extensions: map[string]TranslationExtension{ + "a": { + Name: lo.ToPtr("extJA"), + Description: lo.ToPtr("DESC!"), + PropertySchema: TranslationPropertySchema{ + "default": { + Fields: map[string]TranslationPropertySchemaField{ + "foo": { + Title: lo.ToPtr("foo!"), + Description: lo.ToPtr("DESC"), + Choices: map[string]string{"B": "BBB!", "C": "CCC!"}, + Prefix: lo.ToPtr("p"), + }, + "bar": {Title: lo.ToPtr("bar!")}, + }, + }, + }, + }, + "b": { + Name: lo.ToPtr("ext2"), + PropertySchema: TranslationPropertySchema{}, + }, + }, + Schema: TranslationPropertySchema{ + "default": { + Fields: map[string]TranslationPropertySchemaField{ + "a": {Title: lo.ToPtr("ใ‚")}, + }, + }, + }, + }, + "zh-CN": TranslationRoot{ + Name: lo.ToPtr("ๅ‘ฝๅ"), + Schema: 
TranslationPropertySchema{ + "another": { + Description: lo.ToPtr("ๆ่ฟฐ"), + }, + }, + }, + } + + expected := TranslatedRoot{ + Name: i18n.String{"en": "Name", "ja": "ๅๅ‰", "zh-CN": "ๅ‘ฝๅ"}, + Description: i18n.String{"en": "desc"}, + Extensions: map[string]*TranslatedExtension{ + "a": { + Name: i18n.String{"en": "ext", "ja": "extJA"}, + Description: i18n.String{"ja": "DESC!"}, + PropertySchema: TranslatedPropertySchema{ + "default": &TranslatedPropertySchemaGroup{ + Fields: map[string]*TranslatedPropertySchemaField{ + "foo": { + Title: i18n.String{"en": "foo", "ja": "foo!"}, + Description: i18n.String{"ja": "DESC"}, + Choices: map[string]i18n.String{ + "A": {"en": "AAA"}, + "B": {"en": "BBB", "ja": "BBB!"}, + "C": {"ja": "CCC!"}, + }, + Prefix: i18n.String{"en": "P", "ja": "p"}, + Suffix: i18n.String{"en": "S"}, + }, + "hoge": { + Title: i18n.String{"en": "hoge"}, + }, + "bar": { + Title: i18n.String{"ja": "bar!"}, + }, + }, + }, + }, + }, + "b": { + Name: i18n.String{"ja": "ext2"}, + }, + }, + Schema: TranslatedPropertySchema{ + "default": { + Title: nil, + Description: nil, + Fields: map[string]*TranslatedPropertySchemaField{ + "a": {Title: i18n.String{"ja": "ใ‚"}}, + }, + }, + "another": { + Title: nil, + Description: i18n.String{"zh-CN": "ๆ่ฟฐ"}, + Fields: map[string]*TranslatedPropertySchemaField{ + "foo": {Choices: map[string]i18n.String{"A": {"en": "AAA"}}}, + }, + }, + }, + } + + assert.Equal(t, expected, m.Translated()) + assert.Equal(t, TranslatedRoot{}, TranslationMap{}.Translated()) + assert.Equal(t, TranslatedRoot{}, TranslationMap(nil).Translated()) +} + +func TestTranslatedPropertySchema_getOrCreateGroup(t *testing.T) { + target := TranslatedPropertySchema{} + expected := TranslatedPropertySchema{ + "a": {Title: i18n.String{"ja": "A"}}, + } + + group := target.getOrCreateGroup("a") + assert.Equal(t, &TranslatedPropertySchemaGroup{}, group) + + group.Title = i18n.String{"ja": "A"} + assert.Equal(t, expected, target) +} + +func 
TestTranslatedPropertySchema_getOrCreateField(t *testing.T) { + target := TranslatedPropertySchemaGroup{} + expected := TranslatedPropertySchemaGroup{ + Fields: map[string]*TranslatedPropertySchemaField{ + "a": {Title: i18n.String{"ja": "A"}}, + }, + } + + field := target.getOrCreateField("a") + assert.Equal(t, &TranslatedPropertySchemaField{}, field) + + field.Title = i18n.String{"ja": "A"} + assert.Equal(t, expected, target) +} + +func TestTranslatedPropertySchema_setPropertySchema(t *testing.T) { + target := TranslatedPropertySchema{ + "a": nil, + "b": {}, + } + expected := TranslatedPropertySchema{ + "a": { + Title: i18n.String{"ja": "A"}, + Fields: map[string]*TranslatedPropertySchemaField{ + "f": {Title: i18n.String{"en": "F"}}, + }}, + "b": {Title: i18n.String{"en": "B"}}, + } + + target.setPropertySchema(map[string]TranslationPropertySchema{ + "en": { + "a": { + Fields: map[string]TranslationPropertySchemaField{ + "f": {Title: lo.ToPtr("F")}, + }, + }, + "b": {Title: lo.ToPtr("B")}, + }, + "ja": {"a": {Title: lo.ToPtr("A")}}, + }) + assert.Equal(t, expected, target) +} diff --git a/server/pkg/plugin/manifest/testdata/minimum.yml b/server/pkg/plugin/manifest/testdata/minimum.yml new file mode 100644 index 000000000..f4db3d5cb --- /dev/null +++ b/server/pkg/plugin/manifest/testdata/minimum.yml @@ -0,0 +1,2 @@ +id: aaa +version: 1.1.1 diff --git a/server/pkg/plugin/manifest/testdata/test.yml b/server/pkg/plugin/manifest/testdata/test.yml new file mode 100644 index 000000000..4b918b0ed --- /dev/null +++ b/server/pkg/plugin/manifest/testdata/test.yml @@ -0,0 +1,21 @@ +id: aaa +name: bbb +version: 1.1.1 +extensions: + - id: hoge + type: primitive + visualizer: cesium + schema: + groups: + - id: default + representativeField: a + fields: + - id: a + type: bool + defaultValue: true + availableIf: + field: b + type: number + value: 1 + - id: b + type: number diff --git a/server/pkg/plugin/manifest/testdata/translation.yml 
b/server/pkg/plugin/manifest/testdata/translation.yml new file mode 100644 index 000000000..d7bbfded9 --- /dev/null +++ b/server/pkg/plugin/manifest/testdata/translation.yml @@ -0,0 +1,30 @@ +{ + "description": "test plugin desc", + "name": "test plugin name", + "extensions": + { + "test_ext": + { + "name": "test ext name", + "propertySchema": + { + "test_ps": + { + "description": "test ps desc", + "title": "test ps title", + "fields": + { + "test_field": + { + "title": "test field name", + "description": "test field desc", + "choices": { "test_key": "test choice value" }, + "prefix": "P", + "suffix": "S", + }, + }, + }, + }, + }, + }, +} diff --git a/server/pkg/plugin/metadata.go b/server/pkg/plugin/metadata.go new file mode 100644 index 000000000..f4ae35388 --- /dev/null +++ b/server/pkg/plugin/metadata.go @@ -0,0 +1,11 @@ +package plugin + +import "time" + +type Metadata struct { + Name string `json:"name"` + Description string `json:"description"` + ThumbnailUrl string `json:"thumbnailUrl"` + Author string `json:"author"` + CreatedAt time.Time `json:"createdAt"` +} diff --git a/server/pkg/plugin/plugin.go b/server/pkg/plugin/plugin.go new file mode 100644 index 000000000..8f1196ee6 --- /dev/null +++ b/server/pkg/plugin/plugin.go @@ -0,0 +1,142 @@ +package plugin + +import ( + "github.com/blang/semver" + "github.com/reearth/reearth-backend/pkg/i18n" +) + +type Plugin struct { + id ID + name i18n.String + author string + description i18n.String + repositoryURL string + extensions map[ExtensionID]*Extension + extensionOrder []ExtensionID + schema *PropertySchemaID +} + +func (p *Plugin) ID() ID { + if p == nil { + return ID{} + } + return p.id +} + +func (p *Plugin) Version() semver.Version { + if p == nil { + return semver.Version{} + } + return p.id.Version() +} + +func (p *Plugin) Scene() *SceneID { + return p.ID().Scene() +} + +func (p *Plugin) Name() i18n.String { + if p == nil { + return nil + } + return p.name.Clone() +} + +func (p *Plugin) Author() string 
{ + if p == nil { + return "" + } + return p.author +} + +func (p *Plugin) Description() i18n.String { + if p == nil { + return nil + } + return p.description.Clone() +} + +func (p *Plugin) RepositoryURL() string { + if p == nil { + return "" + } + return p.repositoryURL +} + +func (p *Plugin) Extensions() []*Extension { + if p == nil || len(p.extensions) == 0 { + return nil + } + + if p.extensionOrder == nil { + return []*Extension{} + } + list := make([]*Extension, 0, len(p.extensions)) + for _, id := range p.extensionOrder { + list = append(list, p.extensions[id]) + } + return list +} + +func (p *Plugin) Extension(id ExtensionID) *Extension { + if p == nil { + return nil + } + + e, ok := p.extensions[id] + if ok { + return e + } + return nil +} + +func (p *Plugin) Schema() *PropertySchemaID { + if p == nil { + return nil + } + return p.schema +} + +func (p *Plugin) PropertySchemas() []PropertySchemaID { + if p == nil { + return nil + } + + ps := make([]PropertySchemaID, 0, len(p.extensions)+1) + if p.schema != nil { + ps = append(ps, *p.schema) + } + for _, e := range p.extensionOrder { + ps = append(ps, p.extensions[e].Schema()) + } + return ps +} + +func (p *Plugin) Clone() *Plugin { + if p == nil { + return nil + } + + var extensions map[ExtensionID]*Extension + if p.extensions != nil { + extensions = make(map[ExtensionID]*Extension, len(p.extensions)) + for _, e := range p.extensions { + extensions[e.ID()] = e.Clone() + } + } + + var extensionOrder []ExtensionID + if p.extensionOrder != nil { + extensionOrder = append([]ExtensionID{}, p.extensionOrder...) 
+ } + + return &Plugin{ + id: p.id.Clone(), + name: p.name.Clone(), + author: p.author, + description: p.description.Clone(), + repositoryURL: p.repositoryURL, + extensions: extensions, + extensionOrder: extensionOrder, + schema: p.schema.CopyRef(), + } +} diff --git a/server/pkg/plugin/plugin_test.go b/server/pkg/plugin/plugin_test.go new file mode 100644 index 000000000..386ef2b1b --- /dev/null +++ b/server/pkg/plugin/plugin_test.go @@ -0,0 +1,137 @@ +package plugin + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/stretchr/testify/assert" +) + +func TestPlugin_Extension(t *testing.T) { + tests := []struct { + name string + plugin *Plugin + key ExtensionID + expected *Extension + }{ + { + name: "exiting extension", + key: "yyy", + plugin: New().ID(MustID("aaa~1.1.1")).Extensions([]*Extension{NewExtension().ID("xxx").MustBuild(), NewExtension().ID("yyy").MustBuild()}).MustBuild(), + expected: NewExtension().ID("yyy").MustBuild(), + }, + { + name: "not exiting extension", + key: "zzz", + plugin: New().ID(MustID("aaa~1.1.1")).Extensions([]*Extension{NewExtension().ID("xxx").MustBuild(), NewExtension().ID("yyy").MustBuild()}).MustBuild(), + expected: nil, + }, + { + name: "nil", + key: "zzz", + plugin: nil, + expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.plugin.Extension(tc.key)) + }) + } +} + +func TestPlugin_PropertySchemas(t *testing.T) { + ps1 := MustPropertySchemaID("hoge~0.1.0/a") + ps2 := MustPropertySchemaID("hoge~0.1.0/b") + ps3 := MustPropertySchemaID("hoge~0.1.0/c") + + tests := []struct { + name string + plugin *Plugin + expected []PropertySchemaID + }{ + { + name: "normal", + plugin: New().ID(MustID("aaa~1.1.1")).Schema(&ps1).Extensions([]*Extension{NewExtension().ID("xxx").Schema(ps2).MustBuild(), NewExtension().ID("yyy").Schema(ps3).MustBuild()}).MustBuild(), + expected: []PropertySchemaID{ps1, ps2, ps3}, + }, 
+ { + name: "no plugin property schema", + plugin: New().ID(MustID("aaa~1.1.1")).Extensions([]*Extension{NewExtension().ID("xxx").Schema(ps2).MustBuild(), NewExtension().ID("yyy").Schema(ps3).MustBuild()}).MustBuild(), + expected: []PropertySchemaID{ps2, ps3}, + }, + { + name: "nil", + plugin: nil, + expected: []PropertySchemaID(nil), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.plugin.PropertySchemas()) + }) + } +} + +func TestPlugin_Author(t *testing.T) { + p := New().ID(MustID("aaa~1.1.1")).Author("xx").MustBuild() + assert.Equal(t, "xx", p.Author()) +} + +func TestPlugin_ID(t *testing.T) { + assert.Equal(t, New().ID(MustID("xxx~1.1.1")).MustBuild().ID(), MustID("xxx~1.1.1")) +} + +func TestPlugin_Clone(t *testing.T) { + tests := []struct { + name string + target *Plugin + }{ + { + name: "ok", + target: &Plugin{ + id: MustID("hoge~0.1.0"), + name: i18n.StringFrom("hoge"), + extensions: map[ExtensionID]*Extension{ + ExtensionID("foo"): { + id: ExtensionID("foo"), + extensionType: ExtensionTypeBlock, + schema: MustPropertySchemaID("hoge~0.1.0/foo"), + }, + ExtensionID("bar"): { + id: ExtensionID("bar"), + extensionType: ExtensionTypePrimitive, + schema: MustPropertySchemaID("hoge~0.1.0/bar"), + }, + }, + extensionOrder: []ExtensionID{"foo", "bar"}, + schema: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), + }, + }, + { + name: "empty", + target: &Plugin{}, + }, + { + name: "nil", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + got := tt.target.Clone() + assert.Equal(t, tt.target, got) + if tt.target != nil { + assert.NotSame(t, tt.target, got) + } + }) + } +} diff --git a/server/pkg/plugin/pluginpack/package.go b/server/pkg/plugin/pluginpack/package.go new file mode 100644 index 000000000..cc7544541 --- /dev/null +++ b/server/pkg/plugin/pluginpack/package.go @@ -0,0 +1,95 @@ +package pluginpack + +import ( + 
"archive/zip" + "bytes" + "io" + "path" + "path/filepath" + "regexp" + + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/plugin/manifest" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +const manfiestFilePath = "reearth.yml" + +var translationFileNameRegexp = regexp.MustCompile(`reearth_([a-zA-Z]+(?:-[a-zA-Z]+)?).yml`) + +type Package struct { + Manifest *manifest.Manifest + Files file.Iterator +} + +func PackageFromZip(r io.Reader, scene *plugin.SceneID, sizeLimit int64) (*Package, error) { + b, err := io.ReadAll(io.LimitReader(r, sizeLimit)) + if err != nil { + return nil, rerror.From("zip read error", err) + } + + zr, err := zip.NewReader(bytes.NewReader(b), int64(len(b))) + if err != nil { + return nil, rerror.From("zip open error", err) + } + + basePath := file.ZipBasePath(zr) + + f, err := zr.Open(path.Join(basePath, manfiestFilePath)) + if err != nil { + return nil, rerror.From("manifest open error", err) + } + defer func() { + _ = f.Close() + }() + + translations, err := readTranslation(zr, basePath) + if err != nil { + return nil, err + } + + m, err := manifest.Parse(f, scene, translations.TranslatedRef()) + if err != nil { + return nil, rerror.From("invalid manifest", err) + } + + return &Package{ + Manifest: m, + Files: iterator(file.NewZipReader(zr), basePath), + }, nil +} + +func iterator(a file.Iterator, prefix string) file.Iterator { + return file.NewFilteredIterator(file.NewPrefixIterator(a, prefix), func(p string) bool { + return p == manfiestFilePath || filepath.Ext(p) != ".js" + }) +} + +func readTranslation(fs *zip.Reader, base string) (manifest.TranslationMap, error) { + translationMap := manifest.TranslationMap{} + for _, f := range fs.File { + if filepath.Dir(f.Name) != base { + continue + } + + lang := translationFileNameRegexp.FindStringSubmatch(filepath.Base(f.Name)) + if len(lang) == 0 { + continue + } + langfile, err := f.Open() + if err != 
nil { + return nil, rerror.ErrInternalBy(err) + } + defer func() { + _ = langfile.Close() + }() + t, err := manifest.ParseTranslation(langfile) + if err != nil { + return nil, err + } + translationMap[lang[1]] = t + } + + return translationMap, nil +} diff --git a/server/pkg/plugin/pluginpack/package_test.go b/server/pkg/plugin/pluginpack/package_test.go new file mode 100644 index 000000000..461107bf6 --- /dev/null +++ b/server/pkg/plugin/pluginpack/package_test.go @@ -0,0 +1,53 @@ +package pluginpack + +import ( + "archive/zip" + "os" + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/plugin/manifest" + "github.com/stretchr/testify/assert" +) + +func TestPackageFromZip(t *testing.T) { + expected := &manifest.Manifest{ + Plugin: plugin.New(). + ID(plugin.MustID("testplugin~1.0.1")). + Name(i18n.String{"en": "testplugin", "ja": "ใƒ†ใ‚นใƒˆใƒ—ใƒฉใ‚ฐใ‚คใƒณ", "zh-CN": "ๆต‹่ฏ•ๆ’ไปถ"}). + MustBuild(), + } + + f, err := os.Open("testdata/test.zip") + assert.NoError(t, err) + defer func() { + _ = f.Close() + }() + + p, err := PackageFromZip(f, nil, 10000) + assert.NoError(t, err) + assert.Equal(t, expected, p.Manifest) + + var files []string + for { + n, err := p.Files.Next() + assert.NoError(t, err) + if n == nil { + break + } + files = append(files, n.Path) + } + assert.Equal(t, []string{"index.js"}, files) +} + +func TestPackageFromZip2(t *testing.T) { + f, err := os.Open("testdata/test.zip") + assert.NoError(t, err) + defer func() { + _ = f.Close() + }() + + _, err = PackageFromZip(f, nil, 100) + assert.ErrorIs(t, err, zip.ErrFormat) +} diff --git a/server/pkg/plugin/pluginpack/testdata/test.zip b/server/pkg/plugin/pluginpack/testdata/test.zip new file mode 100644 index 000000000..0d371acbe Binary files /dev/null and b/server/pkg/plugin/pluginpack/testdata/test.zip differ diff --git a/server/pkg/plugin/pluginpack/testdata/test/index.js 
// URL is a URL of a specific Git repository on a well-known hosting service.
type URL struct {
	Host  string
	Owner string
	Repo  string
	// Ref represents a Git ref. There are 3 patterns: a bare commit hash,
	// "heads/BRANCH", and "tags/TAG".
	Ref string
}

var (
	ErrInvalidURL      = errors.New("invalid repository url")
	ErrUnsupportedHost = errors.New("unsupported host")
)

// repos maps a supported host name to a parser that converts the path
// part of a repository URL into a *URL (nil means the path is invalid).
var repos = map[string]func(s string) *URL{
	"github.com": github,
}

// reposArchive maps a supported host name to a builder of the
// source-archive (zip) download URL for a repository.
var reposArchive = map[string]func(u *URL) *url.URL{
	"github.com": githubArchive,
}

// New converts a parsed URL into a repository URL. A nil input yields
// (nil, nil). Scheme-less URLs such as "github.com/owner/repo" are
// accepted; otherwise only http and https are allowed.
func New(u *url.URL) (*URL, error) {
	if u == nil {
		return nil, nil
	}

	h := u.Host
	p := strings.TrimPrefix(u.Path, "/")
	if u.Scheme == "" {
		// Scheme-less form: the host is the first path segment.
		s := strings.SplitN(p, "/", 2)
		// BUG FIX: the original checked len(p) < 2 (the string length),
		// so a path without any slash (e.g. "github.com") passed the
		// check and then panicked on s[1].
		if len(s) < 2 {
			return nil, ErrInvalidURL
		}
		h = s[0]
		p = s[1]
	} else if u.Scheme != "http" && u.Scheme != "https" {
		return nil, ErrInvalidURL
	}

	f := repos[h]
	if f == nil {
		return nil, ErrUnsupportedHost
	}

	r := f(p)
	if r == nil {
		return nil, ErrInvalidURL
	}
	return r, nil
}

// Must is like New but panics on error.
func Must(u *url.URL) *URL {
	u2, err := New(u)
	if err != nil {
		panic(err)
	}
	return u2
}

// Parse parses s as a URL and converts it into a repository URL.
func Parse(s string) (*URL, error) {
	u, err := url.Parse(s)
	if err != nil {
		return nil, ErrInvalidURL
	}
	return New(u)
}

// MustParse is like Parse but panics on error.
func MustParse(s string) *URL {
	u, err := Parse(s)
	if err != nil {
		panic(err)
	}
	return u
}

// String renders the URL as "host/owner/repo[/ref]"; it returns "" when
// any required component is missing.
func (u *URL) String() string {
	if u == nil || u.Host == "" || u.Owner == "" || u.Repo == "" {
		return ""
	}
	sb := strings.Builder{}
	sb.WriteString(u.Host)
	sb.WriteRune('/')
	sb.WriteString(u.Owner)
	sb.WriteRune('/')
	sb.WriteString(u.Repo)
	if u.Ref != "" {
		sb.WriteRune('/')
		sb.WriteString(u.Ref)
	}
	return sb.String()
}

// Head returns the branch name when Ref is "heads/BRANCH", else "".
func (u *URL) Head() string {
	if u == nil || u.Ref == "" {
		return ""
	}
	h := strings.TrimPrefix(u.Ref, "heads/")
	if len(h) == len(u.Ref) {
		return "" // ref is not a head
	}
	return h
}

// Tag returns the tag name when Ref is "tags/TAG", else "".
func (u *URL) Tag() string {
	if u == nil || u.Ref == "" {
		return ""
	}
	h := strings.TrimPrefix(u.Ref, "tags/")
	if len(h) == len(u.Ref) {
		return "" // ref is not a tag
	}
	return h
}

// Commit returns Ref when it is a bare commit-ish (no "/"), else "".
func (u *URL) Commit() string {
	if u == nil || u.Ref == "" || strings.Contains(u.Ref, "/") {
		return ""
	}
	return u.Ref
}

// ArchiveURL returns the zip-archive download URL for the repository, or
// nil when the receiver is nil or the host is unsupported.
func (u *URL) ArchiveURL() *url.URL {
	if u == nil {
		return nil
	}

	f := reposArchive[u.Host]
	if f == nil {
		return nil
	}

	return f(u)
}

// github parses a GitHub URL path ("owner/repo[/...]") into a *URL.
// It understands tree, releases/tag, and archive paths and normalizes
// the ref into "heads/BRANCH", "tags/TAG", or a bare commit-ish.
// It returns nil when the path lacks owner and repo.
func github(p string) *URL {
	s := strings.SplitN(p, "/", 3)
	if len(s) < 2 {
		return nil
	}

	ref := ""
	if len(s) == 3 {
		s2 := strings.Split(s[2], "/")
		switch {
		case len(s2) == 1:
			ref = "heads/" + s2[0]
		case len(s2) >= 2 && s2[0] == "tree":
			// tree/*: unknown whether it is a branch name or a tag name
			ref = "heads/" + s2[1]
		case len(s2) >= 3 && s2[0] == "releases" && s2[1] == "tag":
			// releases/tag/*
			// BUG FIX: GitHub release pages use the plural "releases"
			// path segment; the original compared against "release" and
			// therefore never matched a real release URL.
			ref = "tags/" + s2[2]
		case len(s2) == 2 && s2[0] == "archive":
			// archive/*.zip
			ref = fileNameWithoutExtension(s2[1])
		case len(s2) == 4 && s2[0] == "archive" && s2[1] == "refs":
			// archive/refs/*/*.zip
			ref = s2[2] + "/" + fileNameWithoutExtension(s2[3])
		}
	}

	return &URL{
		Host:  "github.com",
		Owner: s[0],
		Repo:  strings.TrimSuffix(s[1], ".git"),
		Ref:   ref,
	}
}

// githubArchive builds the zip-archive URL for a GitHub repository,
// defaulting to the main branch when no ref is set.
func githubArchive(u *URL) *url.URL {
	r := u.Ref
	if r == "" {
		r = "refs/heads/main"
	} else if c := u.Commit(); c == "" {
		// heads/* and tags/* live under refs/; bare commits do not.
		r = "refs/" + r
	}

	return &url.URL{
		Scheme: "https",
		Host:   "github.com",
		Path:   fmt.Sprintf("%s/%s/archive/%s.zip", u.Owner, u.Repo, r),
	}
}

// fileNameWithoutExtension strips the last extension from fileName.
func fileNameWithoutExtension(fileName string) string {
	if pos := strings.LastIndexByte(fileName, '.'); pos != -1 {
		return fileName[:pos]
	}
	return fileName
}
Input: "", + Err: ErrInvalidURL, + }, + { + Name: "invalid URL", + Input: "https://github.com/bbb", + Err: ErrInvalidURL, + }, + { + Name: "unsupported host", + Input: "https://aaaa.com/xxx", + Err: ErrUnsupportedHost, + }, +} + +func TestNew(t *testing.T) { + // nil + u, err := New(nil) + assert.NoError(t, err) + assert.Nil(t, u) + + for _, tc := range cases { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + ur, _ := url.Parse(tc.Input) + u, err := New(ur) + if tc.Err != nil { + assert.ErrorIs(t, err, tc.Err) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.Expected, u) + } + }) + } +} + +func TestMust(t *testing.T) { + for _, tc := range cases { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + ur, _ := url.Parse(tc.Input) + if tc.Err != nil { + assert.PanicsWithError(t, tc.Err.Error(), func() { + _ = Must(ur) + }) + } else { + assert.Equal(t, tc.Expected, Must(ur)) + } + }) + } +} + +func TestParse(t *testing.T) { + for _, tc := range cases { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + u, err := Parse(tc.Input) + if tc.Err != nil { + assert.ErrorIs(t, err, tc.Err) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.Expected, u) + } + }) + } +} + +func TestMustParse(t *testing.T) { + for _, tc := range cases { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + if tc.Err != nil { + assert.PanicsWithError(t, tc.Err.Error(), func() { + _ = MustParse(tc.Input) + }) + } else { + assert.Equal(t, tc.Expected, MustParse(tc.Input)) + } + }) + } +} + +func TestURL_String(t *testing.T) { + assert.Equal(t, "", (*URL)(nil).String()) + assert.Equal(t, "", (&URL{}).String()) + assert.Equal(t, "", (&URL{Host: "github.com"}).String()) + assert.Equal(t, "", (&URL{Host: "github.com", Owner: "aaa"}).String()) + assert.Equal(t, "", (&URL{Host: "github.com", Repo: "bbb"}).String()) + assert.Equal(t, "github.com/aaa/bbb", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + }).String()) + 
assert.Equal(t, "github.com/aaa/bbb/ccc", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "ccc", + }).String()) +} + +func TestURL_Head(t *testing.T) { + assert.Equal(t, "", (*URL)(nil).Head()) + assert.Equal(t, "", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "", + }).Head()) + assert.Equal(t, "", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "ccc", + }).Head()) + assert.Equal(t, "ccc", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "heads/ccc", + }).Head()) +} + +func TestURL_Tag(t *testing.T) { + assert.Equal(t, "", (*URL)(nil).Tag()) + assert.Equal(t, "", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "", + }).Tag()) + assert.Equal(t, "", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "ccc", + }).Tag()) + assert.Equal(t, "ccc", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "tags/ccc", + }).Tag()) +} + +func TestURL_Commit(t *testing.T) { + assert.Equal(t, "", (*URL)(nil).Commit()) + assert.Equal(t, "", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "", + }).Commit()) + assert.Equal(t, "ccc", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "ccc", + }).Commit()) + assert.Equal(t, "", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "heads/ccc", + }).Commit()) + assert.Equal(t, "", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "tags/ccc", + }).Commit()) +} + +func TestURL_ArchiveURL(t *testing.T) { + cases := []struct { + Name string + Input *URL + Expected string + }{ + { + Name: "github.com/aaaa/bbbb", + Input: &URL{Host: "github.com", Owner: "aaaa", Repo: "bbbb", Ref: ""}, + Expected: "https://github.com/aaaa/bbbb/archive/refs/heads/main.zip", + }, + { + Name: "github.com/aaaa/ccc", + Input: &URL{Host: "github.com", Owner: "aaaa", Repo: "bbbb", Ref: "ccc"}, + Expected: "https://github.com/aaaa/bbbb/archive/ccc.zip", + }, + { + Name: 
"github.com/aaaa/bbbb/heads/cccc", + Input: &URL{Host: "github.com", Owner: "aaaa", Repo: "bbbb", Ref: "heads/ccc"}, + Expected: "https://github.com/aaaa/bbbb/archive/refs/heads/ccc.zip", + }, + { + Name: "github.com/aaaa/bbbb/tags/ccc", + Input: &URL{Host: "github.com", Owner: "aaaa", Repo: "bbbb", Ref: "tags/ccc"}, + Expected: "https://github.com/aaaa/bbbb/archive/refs/tags/ccc.zip", + }, + } + + for _, tt := range cases { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.Expected, tt.Input.ArchiveURL().String()) + }) + } +} diff --git a/server/pkg/plugin/widget.go b/server/pkg/plugin/widget.go new file mode 100644 index 000000000..9c8cc4906 --- /dev/null +++ b/server/pkg/plugin/widget.go @@ -0,0 +1,103 @@ +package plugin + +type WidgetZoneType string +type WidgetSectionType string +type WidgetAreaType string + +const ( + WidgetZoneInner WidgetZoneType = "inner" + WidgetZoneOuter WidgetZoneType = "outer" + WidgetSectionLeft WidgetSectionType = "left" + WidgetSectionCenter WidgetSectionType = "center" + WidgetSectionRight WidgetSectionType = "right" + WidgetAreaTop WidgetAreaType = "top" + WidgetAreaMiddle WidgetAreaType = "middle" + WidgetAreaBottom WidgetAreaType = "bottom" +) + +type WidgetLayout struct { + horizontallyExtendable bool + verticallyExtendable bool + extended bool + floating bool + defaultLocation *WidgetLocation +} + +func (l WidgetLayout) Extendable(loc WidgetLocation) bool { + return l.HorizontallyExtendable() && loc.Horizontal() || l.VerticallyExtendable() && loc.Vertical() +} + +func NewWidgetLayout(horizontallyExtendable, verticallyExtendable, extended, floating bool, defaultLocation *WidgetLocation) WidgetLayout { + return WidgetLayout{ + horizontallyExtendable: horizontallyExtendable, + verticallyExtendable: verticallyExtendable, + extended: extended, + floating: floating, + defaultLocation: defaultLocation.Clone(), + } +} + +func (l WidgetLayout) Ref() *WidgetLayout { + return &l +} + +func (l 
WidgetLayout) HorizontallyExtendable() bool { + return l.horizontallyExtendable +} + +func (l WidgetLayout) VerticallyExtendable() bool { + return l.verticallyExtendable +} + +func (l WidgetLayout) Extended() bool { + return l.extended +} + +func (l WidgetLayout) Floating() bool { + return l.floating +} + +func (l WidgetLayout) DefaultLocation() *WidgetLocation { + if l.defaultLocation == nil { + return nil + } + return l.defaultLocation.Clone() +} + +func (l *WidgetLayout) Clone() *WidgetLayout { + if l == nil { + return nil + } + return &WidgetLayout{ + horizontallyExtendable: l.horizontallyExtendable, + verticallyExtendable: l.verticallyExtendable, + extended: l.extended, + floating: l.floating, + defaultLocation: l.defaultLocation.Clone(), + } +} + +type WidgetLocation struct { + Zone WidgetZoneType + Section WidgetSectionType + Area WidgetAreaType +} + +func (l WidgetLocation) Horizontal() bool { + return l.Section == WidgetSectionCenter +} + +func (l WidgetLocation) Vertical() bool { + return l.Area == WidgetAreaMiddle +} + +func (l *WidgetLocation) Clone() *WidgetLocation { + if l == nil { + return nil + } + return &WidgetLocation{ + Zone: l.Zone, + Section: l.Section, + Area: l.Area, + } +} diff --git a/server/pkg/project/builder.go b/server/pkg/project/builder.go new file mode 100644 index 000000000..47076aa91 --- /dev/null +++ b/server/pkg/project/builder.go @@ -0,0 +1,138 @@ +package project + +import ( + "net/url" + "time" + + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +type Builder struct { + p *Project +} + +func New() *Builder { + return &Builder{p: &Project{publishmentStatus: PublishmentStatusPrivate}} +} + +func (b *Builder) Build() (*Project, error) { + if b.p.id.IsNil() { + return nil, ErrInvalidID + } + if b.p.alias != "" && !CheckAliasPattern(b.p.alias) { + return nil, ErrInvalidAlias + } + if b.p.updatedAt.IsZero() { + b.p.updatedAt = b.p.CreatedAt() + } + return b.p, nil +} + +func (b *Builder) MustBuild() *Project { + r, err := 
b.Build() + if err != nil { + panic(err) + } + return r +} + +func (b *Builder) ID(id ID) *Builder { + b.p.id = id + return b +} + +func (b *Builder) NewID() *Builder { + b.p.id = NewID() + return b +} + +func (b *Builder) IsArchived(isArchived bool) *Builder { + b.p.isArchived = isArchived + return b +} + +func (b *Builder) IsBasicAuthActive(isBasicAuthActive bool) *Builder { + b.p.isBasicAuthActive = isBasicAuthActive + return b +} + +func (b *Builder) BasicAuthUsername(basicAuthUsername string) *Builder { + b.p.basicAuthUsername = basicAuthUsername + return b +} + +func (b *Builder) BasicAuthPassword(basicAuthPassword string) *Builder { + b.p.basicAuthPassword = basicAuthPassword + return b +} + +func (b *Builder) UpdatedAt(updatedAt time.Time) *Builder { + b.p.updatedAt = updatedAt + return b +} + +func (b *Builder) PublishedAt(publishedAt time.Time) *Builder { + b.p.publishedAt = publishedAt + return b +} + +func (b *Builder) Name(name string) *Builder { + b.p.name = name + return b +} + +func (b *Builder) Description(description string) *Builder { + b.p.description = description + return b +} + +func (b *Builder) Alias(alias string) *Builder { + b.p.alias = alias + return b +} + +func (b *Builder) ImageURL(imageURL *url.URL) *Builder { + if imageURL == nil { + b.p.imageURL = nil + } else { + // https://github.com/golang/go/issues/38351 + imageURL2 := *imageURL + b.p.imageURL = &imageURL2 + } + return b +} + +func (b *Builder) PublicTitle(publicTitle string) *Builder { + b.p.publicTitle = publicTitle + return b +} + +func (b *Builder) PublicDescription(publicDescription string) *Builder { + b.p.publicDescription = publicDescription + return b +} + +func (b *Builder) PublicImage(publicImage string) *Builder { + b.p.publicImage = publicImage + return b +} + +func (b *Builder) PublicNoIndex(publicNoIndex bool) *Builder { + b.p.publicNoIndex = publicNoIndex + return b +} + +func (b *Builder) Team(team TeamID) *Builder { + b.p.team = team + return b +} + +func (b 
*Builder) Visualizer(visualizer visualizer.Visualizer) *Builder { + b.p.visualizer = visualizer + return b +} + +func (b *Builder) PublishmentStatus(publishmentStatus PublishmentStatus) *Builder { + b.p.publishmentStatus = publishmentStatus + return b +} diff --git a/server/pkg/project/builder_test.go b/server/pkg/project/builder_test.go new file mode 100644 index 000000000..3da84227d --- /dev/null +++ b/server/pkg/project/builder_test.go @@ -0,0 +1,398 @@ +package project + +import ( + "net/url" + "reflect" + "testing" + "time" + + "github.com/reearth/reearth-backend/pkg/visualizer" + "github.com/stretchr/testify/assert" +) + +func TestNew(t *testing.T) { + var tb = New() + assert.NotNil(t, tb) +} + +func TestBuilder_ID(t *testing.T) { + var tb = New() + res := tb.ID(NewID()).MustBuild() + assert.NotNil(t, res.ID()) +} + +func TestBuilder_Name(t *testing.T) { + var tb = New().NewID() + res := tb.Name("foo").MustBuild() + assert.Equal(t, "foo", res.Name()) +} + +func TestBuilder_NewID(t *testing.T) { + var tb = New() + res := tb.NewID().MustBuild() + assert.NotNil(t, res.ID()) +} + +func TestBuilder_Alias(t *testing.T) { + var tb = New().NewID() + res := tb.Alias("xxxxx").MustBuild() + assert.Equal(t, "xxxxx", res.Alias()) +} + +func TestBuilder_Description(t *testing.T) { + var tb = New().NewID() + res := tb.Description("desc").MustBuild() + assert.Equal(t, "desc", res.Description()) +} + +func TestBuilder_IsArchived(t *testing.T) { + var tb = New().NewID() + res := tb.IsArchived(true).MustBuild() + assert.True(t, res.IsArchived()) +} + +func TestBuilder_BasicAuthUsername(t *testing.T) { + var tb = New().NewID() + res := tb.BasicAuthUsername("username").MustBuild() + assert.Equal(t, "username", res.BasicAuthUsername()) +} +func TestBuilder_BasicAuthPassword(t *testing.T) { + var tb = New().NewID() + res := tb.BasicAuthPassword("password").MustBuild() + assert.Equal(t, "password", res.BasicAuthPassword()) +} + +func TestBuilder_ImageURL(t *testing.T) { + tests := 
[]struct { + name string + image *url.URL + expectedNil bool + }{ + { + name: "image not nil", + image: &url.URL{}, + expectedNil: false, + }, + { + name: "image is nil", + image: nil, + expectedNil: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + tb := New().NewID() + res := tb.ImageURL(tt.image).MustBuild() + if res.imageURL == nil { + assert.True(t, tt.expectedNil) + } else { + assert.False(t, tt.expectedNil) + } + }) + } +} + +func TestBuilder_Visualizer(t *testing.T) { + var tb = New().NewID() + res := tb.Visualizer(visualizer.VisualizerCesium).MustBuild() + assert.Equal(t, visualizer.VisualizerCesium, res.Visualizer()) +} + +func TestBuilder_Team(t *testing.T) { + var tb = New().NewID() + res := tb.Team(NewTeamID()).MustBuild() + assert.NotNil(t, res.Team()) +} + +func TestBuilder_PublicImage(t *testing.T) { + var tb = New().NewID() + res := tb.PublicImage("xxxxx").MustBuild() + assert.Equal(t, "xxxxx", res.PublicImage()) +} + +func TestBuilder_PublishedAt(t *testing.T) { + var tb = New().NewID() + d := time.Date(1986, 12, 11, 19, 30, 0, 0, time.UTC) + res := tb.PublishedAt(d).MustBuild() + assert.True(t, reflect.DeepEqual(res.PublishedAt(), d)) +} + +func TestBuilder_UpdatedAt(t *testing.T) { + var tb = New().NewID() + d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) + res := tb.UpdatedAt(d).MustBuild() + assert.True(t, reflect.DeepEqual(res.UpdatedAt(), d)) +} + +func TestBuilder_PublicTitle(t *testing.T) { + var tb = New().NewID() + res := tb.PublicTitle("xxx.aaa").MustBuild() + assert.Equal(t, "xxx.aaa", res.PublicTitle()) +} + +func TestBuilder_PublishmentStatus(t *testing.T) { + var tb = New().NewID() + var x PublishmentStatus = "xxx.aaa" + res := tb.PublishmentStatus("xxx.aaa").MustBuild() + assert.Equal(t, x, res.PublishmentStatus()) +} + +func TestBuilder_PublicDescription(t *testing.T) { + var tb = New().NewID() + res := tb.PublicDescription("pdesc").MustBuild() + assert.Equal(t, 
"pdesc", res.PublicDescription()) +} + +func TestBuilder_PublicNoIndex(t *testing.T) { + var tb = New().NewID() + res := tb.PublicNoIndex(true).MustBuild() + assert.Equal(t, true, res.PublicNoIndex()) +} + +func TestBuilder_Build(t *testing.T) { + d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) + i, _ := url.Parse("ttt://xxx.aa/") + pid := NewID() + tid := NewTeamID() + + type args struct { + name, description string + alias, publicTitle string + publicDescription string + publicImage string + id ID + isArchived bool + updatedAt time.Time + publishedAt time.Time + imageURL *url.URL + publicNoIndex bool + team TeamID + visualizer visualizer.Visualizer + publishmentStatus PublishmentStatus + } + + tests := []struct { + name string + args args + expected *Project + err error + }{ + { + name: "build normal project", + args: args{ + name: "xxx.aaa", + description: "ddd", + alias: "aaaaa", + publicTitle: "ttt", + publicDescription: "dddd", + publicImage: "iii", + id: pid, + isArchived: false, + updatedAt: d, + publishedAt: d, + imageURL: i, + publicNoIndex: true, + team: tid, + visualizer: visualizer.VisualizerCesium, + publishmentStatus: "ppp", + }, + expected: &Project{ + id: pid, + description: "ddd", + name: "xxx.aaa", + alias: "aaaaa", + publicTitle: "ttt", + publicDescription: "dddd", + publicImage: "iii", + isArchived: false, + updatedAt: d, + publishedAt: d, + imageURL: i, + publicNoIndex: true, + team: tid, + visualizer: visualizer.VisualizerCesium, + publishmentStatus: "ppp", + }, + }, + { + name: "zero updated at", + args: args{ + id: pid, + }, + expected: &Project{ + id: pid, + updatedAt: pid.Timestamp(), + }, + }, + { + name: "failed invalid id", + err: ErrInvalidID, + }, + { + name: "failed invalid alias", + args: args{ + id: NewID(), + alias: "xxx.aaa", + }, + expected: nil, + err: ErrInvalidAlias, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + p, err := New(). + ID(tt.args.id). 
+ PublicNoIndex(tt.args.publicNoIndex). + PublicDescription(tt.args.publicDescription). + PublishmentStatus(tt.args.publishmentStatus). + PublicTitle(tt.args.publicTitle). + UpdatedAt(tt.args.updatedAt). + PublishedAt(tt.args.publishedAt). + PublicImage(tt.args.publicImage). + Team(tt.args.team). + ImageURL(tt.args.imageURL). + Name(tt.args.name). + Alias(tt.args.alias). + Visualizer(tt.args.visualizer). + UpdatedAt(tt.args.updatedAt). + Description(tt.args.description). + Build() + + if tt.err == nil { + assert.Equal(t, tt.expected, p) + } else { + assert.Equal(t, tt.err, err) + } + }) + } +} + +func TestBuilder_MustBuild(t *testing.T) { + d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) + i, _ := url.Parse("ttt://xxx.aa/") + pid := NewID() + tid := NewTeamID() + + type args struct { + name, description string + alias, publicTitle string + publicDescription string + publicImage string + id ID + isArchived bool + updatedAt time.Time + publishedAt time.Time + imageURL *url.URL + publicNoIndex bool + team TeamID + visualizer visualizer.Visualizer + publishmentStatus PublishmentStatus + } + + tests := []struct { + name string + args args + expected *Project + err error + }{ + { + name: "build normal project", + args: args{ + name: "xxx.aaa", + description: "ddd", + alias: "aaaaa", + publicTitle: "ttt", + publicDescription: "dddd", + publicImage: "iii", + id: pid, + isArchived: false, + updatedAt: d, + publishedAt: d, + imageURL: i, + publicNoIndex: true, + team: tid, + visualizer: visualizer.VisualizerCesium, + publishmentStatus: "ppp", + }, + expected: &Project{ + id: pid, + description: "ddd", + name: "xxx.aaa", + alias: "aaaaa", + publicTitle: "ttt", + publicDescription: "dddd", + publicImage: "iii", + isArchived: false, + updatedAt: d, + publishedAt: d, + imageURL: i, + publicNoIndex: true, + team: tid, + visualizer: visualizer.VisualizerCesium, + publishmentStatus: "ppp", + }, + }, + { + name: "zero updated at", + args: args{ + id: pid, + }, + expected: 
&Project{ + id: pid, + updatedAt: pid.Timestamp(), + }, + }, + { + name: "failed invalid id", + err: ErrInvalidID, + }, + { + name: "failed invalid alias", + args: args{ + id: NewID(), + alias: "xxx.aaa", + }, + err: ErrInvalidAlias, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + build := func() *Project { + t.Helper() + return New(). + ID(tt.args.id). + PublicNoIndex(tt.args.publicNoIndex). + PublicDescription(tt.args.publicDescription). + PublishmentStatus(tt.args.publishmentStatus). + PublicTitle(tt.args.publicTitle). + UpdatedAt(tt.args.updatedAt). + PublishedAt(tt.args.publishedAt). + PublicImage(tt.args.publicImage). + Team(tt.args.team). + ImageURL(tt.args.imageURL). + Name(tt.args.name). + Alias(tt.args.alias). + Visualizer(tt.args.visualizer). + UpdatedAt(tt.args.updatedAt). + Description(tt.args.description). + MustBuild() + } + + if tt.err != nil { + assert.PanicsWithValue(t, tt.err, func() { _ = build() }) + } else { + assert.Equal(t, tt.expected, build()) + } + }) + } +} diff --git a/server/pkg/project/id.go b/server/pkg/project/id.go new file mode 100644 index 000000000..da16b1f43 --- /dev/null +++ b/server/pkg/project/id.go @@ -0,0 +1,22 @@ +package project + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +type ID = id.ProjectID +type TeamID = id.TeamID + +var NewID = id.NewProjectID +var NewTeamID = id.NewTeamID + +var MustID = id.MustProjectID +var MustTeamID = id.MustTeamID + +var IDFrom = id.ProjectIDFrom +var TeamIDFrom = id.TeamIDFrom + +var IDFromRef = id.ProjectIDFromRef +var TeamIDFromRef = id.TeamIDFromRef + +var ErrInvalidID = id.ErrInvalidID diff --git a/server/pkg/project/project.go b/server/pkg/project/project.go new file mode 100644 index 000000000..d026f0b9f --- /dev/null +++ b/server/pkg/project/project.go @@ -0,0 +1,217 @@ +package project + +import ( + "errors" + "net/url" + "regexp" + "time" + + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +var 
( + ErrInvalidAlias error = errors.New("invalid alias") + aliasRegexp = regexp.MustCompile("^[a-zA-Z0-9_-]{5,32}$") +) + +type Project struct { + id ID + isArchived bool + isBasicAuthActive bool + basicAuthUsername string + basicAuthPassword string + updatedAt time.Time + publishedAt time.Time + name string + description string + alias string + imageURL *url.URL + publicTitle string + publicDescription string + publicImage string + publicNoIndex bool + team TeamID + visualizer visualizer.Visualizer + publishmentStatus PublishmentStatus +} + +func (p *Project) ID() ID { + return p.id +} + +func (p *Project) IsArchived() bool { + return p.isArchived +} + +func (p *Project) IsBasicAuthActive() bool { + return p.isBasicAuthActive +} + +func (p *Project) BasicAuthUsername() string { + return p.basicAuthUsername +} + +func (p *Project) BasicAuthPassword() string { + return p.basicAuthPassword +} + +func (p *Project) UpdatedAt() time.Time { + return p.updatedAt +} + +func (p *Project) PublishedAt() time.Time { + return p.publishedAt +} + +func (p *Project) Name() string { + return p.name +} + +func (p *Project) Description() string { + return p.description +} + +func (p *Project) Alias() string { + return p.alias +} + +func (p *Project) ImageURL() *url.URL { + if p == nil || p.imageURL == nil { + return nil + } + // https://github.com/golang/go/issues/38351 + imageURL2 := *p.imageURL + return &imageURL2 +} + +func (p *Project) PublicTitle() string { + return p.publicTitle +} + +func (p *Project) PublicDescription() string { + return p.publicDescription +} + +func (p *Project) PublicImage() string { + return p.publicImage +} + +func (p *Project) PublicNoIndex() bool { + return p.publicNoIndex +} + +func (p *Project) PublishmentStatus() PublishmentStatus { + return p.publishmentStatus +} + +func (p *Project) Team() TeamID { + return p.team +} + +func (p *Project) CreatedAt() time.Time { + return p.id.Timestamp() +} + +func (p *Project) Visualizer() visualizer.Visualizer { + 
return p.visualizer +} + +func (p *Project) SetArchived(isArchived bool) { + p.isArchived = isArchived +} + +func (p *Project) SetIsBasicAuthActive(isBasicAuthActive bool) { + p.isBasicAuthActive = isBasicAuthActive +} + +func (p *Project) SetBasicAuthUsername(basicAuthUsername string) { + p.basicAuthUsername = basicAuthUsername +} + +func (p *Project) SetBasicAuthPassword(basicAuthPassword string) { + p.basicAuthPassword = basicAuthPassword +} + +func (p *Project) SetUpdatedAt(updatedAt time.Time) { + p.updatedAt = updatedAt +} + +func (p *Project) SetPublishedAt(publishedAt time.Time) { + p.publishedAt = publishedAt +} + +func (p *Project) SetImageURL(imageURL *url.URL) { + if imageURL == nil { + p.imageURL = nil + } else { + // https://github.com/golang/go/issues/38351 + imageURL2 := *imageURL + p.imageURL = &imageURL2 + } +} + +func (p *Project) UpdateName(name string) { + p.name = name +} + +func (p *Project) UpdateDescription(description string) { + p.description = description +} + +func (p *Project) UpdateAlias(alias string) error { + if CheckAliasPattern(alias) { + p.alias = alias + } else { + return ErrInvalidAlias + } + return nil +} + +func (p *Project) UpdatePublicTitle(publicTitle string) { + p.publicTitle = publicTitle +} + +func (p *Project) UpdatePublicDescription(publicDescription string) { + p.publicDescription = publicDescription +} + +func (p *Project) UpdatePublicImage(publicImage string) { + p.publicImage = publicImage +} + +func (p *Project) UpdatePublicNoIndex(publicNoIndex bool) { + p.publicNoIndex = publicNoIndex +} + +func (p *Project) UpdateTeam(team TeamID) { + p.team = team +} + +func (p *Project) UpdateVisualizer(visualizer visualizer.Visualizer) { + p.visualizer = visualizer +} + +func (p *Project) UpdatePublishmentStatus(publishmentStatus PublishmentStatus) { + p.publishmentStatus = publishmentStatus +} + +func (p *Project) PublicName() string { + if p == nil || p.publishmentStatus == PublishmentStatusPrivate { + return "" + } + 
return p.alias +} + +func (p *Project) MatchWithPublicName(name string) bool { + if p == nil || name == "" || p.publishmentStatus == PublishmentStatusPrivate { + return false + } + if p.publishmentStatus != PublishmentStatusPrivate && p.alias == name { + return true + } + return false +} + +func CheckAliasPattern(alias string) bool { + return alias != "" && aliasRegexp.Match([]byte(alias)) +} diff --git a/server/pkg/project/project_test.go b/server/pkg/project/project_test.go new file mode 100644 index 000000000..848c59f96 --- /dev/null +++ b/server/pkg/project/project_test.go @@ -0,0 +1,340 @@ +package project + +import ( + "net/url" + "testing" + "time" + + "github.com/reearth/reearth-backend/pkg/visualizer" + "github.com/stretchr/testify/assert" +) + +func TestCheckAliasPattern(t *testing.T) { + testCase := []struct { + name, alias string + expexted bool + }{ + { + name: "accepted regex", + alias: "xxxxx", + expexted: true, + }, + { + name: "refused regex", + alias: "xxx", + expexted: false, + }, + } + + for _, tt := range testCase { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expexted, CheckAliasPattern(tt.alias)) + }) + } +} + +func TestProject_MatchWithPublicName(t *testing.T) { + testCase := []struct { + name, n string + p *Project + expexted bool + }{ + { + name: "alias = name, publishmentStatus = public", + n: "aaaaa", + p: &Project{ + publishmentStatus: PublishmentStatusPublic, + alias: "aaaaa", + }, + expexted: true, + }, + { + name: "nil project", + n: "xx", + p: nil, + expexted: false, + }, + { + name: "nil project", + n: "", + p: &Project{ + publishmentStatus: PublishmentStatusPublic, + alias: "aaaaa", + }, + expexted: false, + }, + { + name: "nil project", + n: "aaaaa", + p: &Project{ + publishmentStatus: PublishmentStatusPrivate, + alias: "aaaaa", + }, + expexted: false, + }, + } + + for _, tt := range testCase { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expexted, 
tt.p.MatchWithPublicName(tt.n)) + }) + } +} + +func TestProject_SetArchived(t *testing.T) { + p := &Project{isArchived: false} + p.SetArchived(true) + assert.Equal(t, true, p.IsArchived()) +} + +func TestProject_SetPublishedAt(t *testing.T) { + p := &Project{} + p.SetPublishedAt(time.Date(1900, 1, 1, 00, 00, 1, 1, time.UTC)) + assert.Equal(t, time.Date(1900, 1, 1, 00, 00, 1, 1, time.UTC), p.publishedAt) +} + +func TestProject_SetUpdatedAt(t *testing.T) { + p := &Project{} + p.SetUpdatedAt(time.Date(1900, 1, 1, 00, 00, 1, 1, time.UTC)) + assert.Equal(t, time.Date(1900, 1, 1, 00, 00, 1, 1, time.UTC), p.UpdatedAt()) +} + +func TestProject_SetImageURL(t *testing.T) { + testCase := []struct { + name string + image *url.URL + p *Project + expectedNil bool + }{ + { + name: "nil image", + image: nil, + p: &Project{}, + expectedNil: true, + }, + { + name: "set new image", + image: &url.URL{}, + p: &Project{}, + expectedNil: false, + }, + } + + for _, tt := range testCase { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + tt.p.SetImageURL(tt.image) + if tt.expectedNil { + assert.Nil(t, tt.p.ImageURL()) + } else { + assert.NotNil(t, tt.p.ImageURL()) + } + }) + } +} + +func TestProject_UpdateName(t *testing.T) { + p := &Project{} + p.UpdateName("foo") + assert.Equal(t, "foo", p.Name()) +} + +func TestProject_UpdateDescription(t *testing.T) { + p := &Project{} + p.UpdateDescription("aaa") + assert.Equal(t, "aaa", p.Description()) +} + +func TestProject_UpdatePublishmentStatus(t *testing.T) { + p := &Project{} + p.UpdatePublishmentStatus(PublishmentStatusPrivate) + assert.Equal(t, PublishmentStatusPrivate, p.PublishmentStatus()) +} + +func TestProject_UpdatePublicNoIndex(t *testing.T) { + p := &Project{} + p.UpdatePublicNoIndex(true) + assert.Equal(t, true, p.PublicNoIndex()) +} + +func TestProject_UpdatePublicDescription(t *testing.T) { + p := &Project{} + p.UpdatePublicDescription("ppp") + assert.Equal(t, "ppp", p.PublicDescription()) +} + +func 
TestProject_UpdatePublicTitle(t *testing.T) { + p := &Project{} + p.UpdatePublicTitle("ttt") + assert.Equal(t, "ttt", p.PublicTitle()) +} + +func TestProject_UpdateTeam(t *testing.T) { + p := &Project{} + p.UpdateTeam(NewTeamID()) + assert.NotNil(t, p.Team()) +} + +func TestProject_UpdateVisualizer(t *testing.T) { + p := &Project{} + var v visualizer.Visualizer = "ttt" + p.UpdateVisualizer(v) + assert.Equal(t, v, p.Visualizer()) +} + +func TestProject_UpdateAlias(t *testing.T) { + tests := []struct { + name, a string + expected string + err error + }{ + { + name: "accepted alias", + a: "xxxxx", + expected: "xxxxx", + err: nil, + }, + { + name: "fail: invalid alias", + a: "xxx", + expected: "", + err: ErrInvalidAlias, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + p := &Project{} + err := p.UpdateAlias(tt.a) + if tt.err == nil { + assert.Equal(t, tt.expected, p.Alias()) + } else { + assert.Equal(t, tt.err, err) + } + }) + } +} + +func TestProject_UpdatePublicImage(t *testing.T) { + p := &Project{} + p.UpdatePublicImage("xxx") + assert.Equal(t, "xxx", p.PublicImage()) +} + +func TestProject_PublicName(t *testing.T) { + tests := []struct { + name string + p *Project + expected string + }{ + { + name: "private publishment status", + p: &Project{ + publishmentStatus: PublishmentStatusLimited, + alias: "aaaaa", + }, + expected: "aaaaa", + }, + { + name: "not private nor limited publishment status", + p: &Project{ + alias: "aaaaa", + }, + expected: "aaaaa", + }, + { + name: "nil project", + p: nil, + expected: "", + }, + { + name: "private publishment status", + p: &Project{ + publishmentStatus: PublishmentStatusPrivate, + }, + expected: "", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.p.PublicName() + assert.Equal(t, tt.expected, res) + }) + } +} + +func TestProject_IsBasicAuthActive(t *testing.T) { + tests := []struct { + name string + p 
*Project + expected bool + }{ + { + name: "basic auth is inactive", + p: &Project{ + isBasicAuthActive: false, + }, + expected: false, + }, + { + name: "basic auth is active", + p: &Project{ + isBasicAuthActive: true, + }, + expected: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.p.IsBasicAuthActive() + assert.Equal(t, tt.expected, res) + }) + } +} + +func TestProject_BasicAuthUsername(t *testing.T) { + t.Run("return basic auth username", func(t *testing.T) { + p := &Project{basicAuthUsername: "test1"} + res := p.BasicAuthUsername() + assert.Equal(t, "test1", res) + }) +} + +func TestProject_BasicAuthPassword(t *testing.T) { + t.Run("return basic auth password", func(t *testing.T) { + p := &Project{basicAuthPassword: "password"} + res := p.BasicAuthPassword() + assert.Equal(t, "password", res) + }) +} + +func TestProject_SetIsBasicAuthActive(t *testing.T) { + p := &Project{} + p.SetIsBasicAuthActive(true) + assert.Equal(t, true, p.isBasicAuthActive) +} + +func TestProject_SetBasicAuthUsername(t *testing.T) { + p := &Project{} + p.SetBasicAuthUsername("username") + assert.Equal(t, "username", p.basicAuthUsername) +} + +func TestProject_SetBasicAuthPassword(t *testing.T) { + p := &Project{} + p.SetBasicAuthPassword("password") + assert.Equal(t, "password", p.basicAuthPassword) +} diff --git a/server/pkg/project/publishment_status.go b/server/pkg/project/publishment_status.go new file mode 100644 index 000000000..43d33917e --- /dev/null +++ b/server/pkg/project/publishment_status.go @@ -0,0 +1,11 @@ +package project + +type PublishmentStatus string + +const ( + PublishmentStatusPublic PublishmentStatus = "public" + + PublishmentStatusLimited PublishmentStatus = "limited" + + PublishmentStatusPrivate PublishmentStatus = "private" +) diff --git a/server/pkg/property/builder.go b/server/pkg/property/builder.go new file mode 100644 index 000000000..17d8a2385 --- /dev/null +++ 
b/server/pkg/property/builder.go @@ -0,0 +1,73 @@ +package property + +type Builder struct { + p *Property +} + +func New() *Builder { + return &Builder{p: &Property{}} +} + +func (b *Builder) Build() (*Property, error) { + if b.p.id.IsNil() { + return nil, ErrInvalidID + } + if b.p.scene.IsNil() { + return nil, ErrInvalidSceneID + } + if b.p.schema.IsNil() { + return nil, ErrInvalidPropertySchemaID + } + return b.p, nil +} + +func (b *Builder) MustBuild() *Property { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +func (b *Builder) ID(id ID) *Builder { + b.p.id = id + return b +} + +func (b *Builder) NewID() *Builder { + b.p.id = NewID() + return b +} + +func (b *Builder) Scene(s SceneID) *Builder { + b.p.scene = s + return b +} + +func (b *Builder) Schema(schema SchemaID) *Builder { + b.p.schema = schema + return b +} + +func (b *Builder) Items(items []Item) *Builder { + if len(items) == 0 { + b.p.items = nil + return b + } + + newItems := make([]Item, 0, len(items)) + ids := map[ItemID]struct{}{} + for _, f := range items { + if f == nil { + continue + } + if _, ok := ids[f.ID()]; ok { + continue + } + ids[f.ID()] = struct{}{} + newItems = append(newItems, f) + } + + b.p.items = newItems + return b +} diff --git a/server/pkg/property/builder_test.go b/server/pkg/property/builder_test.go new file mode 100644 index 000000000..312cc3ef2 --- /dev/null +++ b/server/pkg/property/builder_test.go @@ -0,0 +1,194 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestBuilder_New(t *testing.T) { + b := New() + assert.NotNil(t, b) +} + +func TestBuilder_ID(t *testing.T) { + pid := NewID() + p := New().ID(pid).Scene(NewSceneID()).Schema(MustSchemaID("xxx~1.1.1/aa")).MustBuild() + assert.Equal(t, pid, p.ID()) +} + +func TestBuilder_NewID(t *testing.T) { + p := New().NewID().Scene(NewSceneID()).Schema(MustSchemaID("xxx~1.1.1/aa")).MustBuild() + assert.False(t, p.ID().IsEmpty()) +} + +func 
TestBuilder_Schema(t *testing.T) { + p := New().NewID().Scene(NewSceneID()).Schema(MustSchemaID("xxx~1.1.1/aa")).MustBuild() + assert.Equal(t, MustSchemaID("xxx~1.1.1/aa"), p.Schema()) +} + +func TestBuilder_Scene(t *testing.T) { + sid := NewSceneID() + p := New().NewID().Scene(sid).Schema(MustSchemaID("xxx~1.1.1/aa")).MustBuild() + assert.Equal(t, sid, p.Scene()) +} + +func TestBuilder_Items(t *testing.T) { + iid := NewItemID() + propertySchemaField1ID := FieldID("a") + propertySchemaGroup1ID := SchemaGroupID("A") + + tests := []struct { + Name string + Input, Expected []Item + }{ + { + Name: "has nil item", + Input: []Item{nil}, + Expected: []Item{}, + }, + { + Name: "has duplicated item", + Input: []Item{ + NewGroup().ID(iid).SchemaGroup(propertySchemaGroup1ID). + Fields([]*Field{ + NewField(propertySchemaField1ID). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). + MustBuild(), + }).MustBuild(), + NewGroup().ID(iid).SchemaGroup(propertySchemaGroup1ID). + Fields([]*Field{ + NewField(propertySchemaField1ID). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). + MustBuild(), + }).MustBuild(), + }, + Expected: []Item{NewGroup().ID(iid).SchemaGroup(propertySchemaGroup1ID). + Fields([]*Field{ + NewField(propertySchemaField1ID). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). + MustBuild(), + }).MustBuild()}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := New().NewID(). + Scene(NewSceneID()). + Schema(MustSchemaID("xxx~1.1.1/aa")). + Items(tt.Input). 
+ MustBuild() + assert.Equal(t, tt.Expected, res.Items()) + }) + } +} + +func TestBuilder_Build(t *testing.T) { + pid := NewID() + sid := NewSceneID() + scid := MustSchemaID("xxx~1.1.1/aa") + iid := NewItemID() + propertySchemaField1ID := FieldID("a") + propertySchemaGroup1ID := SchemaGroupID("A") + + type args struct { + ID ID + Scene SceneID + Schema SchemaID + Items []Item + } + + tests := []struct { + Name string + Args args + Err error + Expected *Property + }{ + { + Name: "success", + Args: args{ + ID: pid, + Scene: sid, + Schema: scid, + Items: []Item{ + &Group{ + itemBase: itemBase{ + ID: iid, + SchemaGroup: propertySchemaGroup1ID, + }, + fields: []*Field{ + { + field: propertySchemaField1ID, + v: OptionalValueFrom(ValueTypeString.ValueFrom("xxx")), + }, + }, + }, + }, + }, + Expected: &Property{ + id: pid, + scene: sid, + schema: scid, + items: []Item{ + &Group{ + itemBase: itemBase{ + ID: iid, + SchemaGroup: propertySchemaGroup1ID, + }, + fields: []*Field{ + { + field: propertySchemaField1ID, + v: OptionalValueFrom(ValueTypeString.ValueFrom("xxx")), + }, + }, + }, + }, + }, + }, + { + Name: "fail invalid id", + Args: args{ + ID: ID{}, + }, + Err: ErrInvalidID, + }, + { + Name: "fail invalid scene", + Args: args{ + ID: pid, + }, + Err: ErrInvalidSceneID, + }, + { + Name: "fail invalid schema", + Args: args{ + ID: pid, + Scene: sid, + }, + Err: ErrInvalidPropertySchemaID, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := New(). + ID(tt.Args.ID). + Items(tt.Args.Items). + Scene(tt.Args.Scene). + Schema(tt.Args.Schema). 
+ Build() + if tt.Err == nil { + assert.Nil(t, err) + assert.Equal(t, tt.Expected, res) + } else { + assert.Nil(t, res) + assert.Equal(t, tt.Err, err) + } + }) + } +} diff --git a/server/pkg/property/condition.go b/server/pkg/property/condition.go new file mode 100644 index 000000000..8424da317 --- /dev/null +++ b/server/pkg/property/condition.go @@ -0,0 +1,16 @@ +package property + +type Condition struct { + Field FieldID + Value *Value +} + +func (c *Condition) Clone() *Condition { + if c == nil { + return nil + } + return &Condition{ + Field: c.Field, + Value: c.Value.Clone(), + } +} diff --git a/server/pkg/property/condition_test.go b/server/pkg/property/condition_test.go new file mode 100644 index 000000000..0f77682d0 --- /dev/null +++ b/server/pkg/property/condition_test.go @@ -0,0 +1,40 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestCondition_Clone(t *testing.T) { + tests := []struct { + Name string + Con, Expected *Condition + }{ + { + Name: "nil condition", + Con: nil, + Expected: nil, + }, + { + Name: "nil condition", + Con: &Condition{ + Field: "a", + Value: ValueTypeBool.ValueFrom(true), + }, + Expected: &Condition{ + Field: "a", + Value: ValueTypeBool.ValueFrom(true), + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Con.Clone() + assert.Equal(t, tt.Expected, res) + }) + } +} diff --git a/server/pkg/property/diff.go b/server/pkg/property/diff.go new file mode 100644 index 000000000..ade7c7d0e --- /dev/null +++ b/server/pkg/property/diff.go @@ -0,0 +1,140 @@ +package property + +type SchemaDiff struct { + From SchemaID + To SchemaID + Deleted []SchemaDiffDeleted + Moved []SchemaDiffMoved + TypeChanged []SchemaDiffTypeChanged +} + +type SchemaDiffDeleted SchemaFieldPointer + +type SchemaDiffMoved struct { + From SchemaFieldPointer + To SchemaFieldPointer + ToList bool +} + +type SchemaDiffTypeChanged struct { + SchemaFieldPointer + 
NewType ValueType +} + +func SchemaDiffFrom(old, new *Schema) (d SchemaDiff) { + if old != nil { + d.From = old.ID() + } + if new != nil { + d.To = new.ID() + } + if old == nil || new == nil || old == new { + return + } + + for _, gf := range old.Groups().GroupAndFields() { + ngf := new.Groups().GroupAndField(gf.Field.ID()) + if ngf == nil { + d.Deleted = append(d.Deleted, SchemaDiffDeleted(gf.SchemaFieldPointer())) + continue + } + + if ngf.Group.ID() != gf.Group.ID() { + d.Moved = append(d.Moved, SchemaDiffMoved{ + From: gf.SchemaFieldPointer(), + To: ngf.SchemaFieldPointer(), + ToList: ngf.Group.IsList(), + }) + } + + if ngf.Field.Type() != gf.Field.Type() { + d.TypeChanged = append(d.TypeChanged, SchemaDiffTypeChanged{ + SchemaFieldPointer: ngf.SchemaFieldPointer(), + NewType: ngf.Field.Type(), + }) + } + } + + return +} + +func SchemaDiffFromProperty(old *Property, new *Schema) (d SchemaDiff) { + return SchemaDiffFrom(old.GuessSchema(), new) +} + +func (d *SchemaDiff) Migrate(p *Property) (res bool) { + if d.IsEmpty() { + return + } + + res = p.updateSchema(d.To) + + for _, dd := range d.Deleted { + if p.RemoveFields(SchemaFieldPointer(dd).Pointer()) { + res = true + } + } + + for _, dm := range d.Moved { + if dm.ToList { + // group -> list and list -> list are not supported; just delete + if p.RemoveFields(dm.From.Pointer()) { + res = true + } + continue + } + + if p.MoveFields(dm.From.Pointer(), dm.To.Pointer()) { + res = true + } + } + + for _, dt := range d.TypeChanged { + if p.Cast(dt.Pointer(), dt.NewType) { + res = true + } + } + + return +} + +func (d *SchemaDiff) IsEmpty() bool { + return d == nil || len(d.Deleted) == 0 && len(d.Moved) == 0 && len(d.TypeChanged) == 0 +} + +func (d *SchemaDiff) IsIDChanged() bool { + return d != nil && !d.From.Equal(d.To) +} + +type SchemaDiffList []SchemaDiff + +func (l SchemaDiffList) FindByFrom(from SchemaID) *SchemaDiff { + for _, d := range l { + if d.From.Equal(from) { + return &d + } + } + return nil +} + +func 
(l SchemaDiffList) FromSchemas() []SchemaID { + if len(l) == 0 { + return nil + } + + res := make([]SchemaID, 0, len(l)) + for _, d := range l { + s := d.From + found := false + for _, r := range res { + if r.Equal(s) { + found = true + break + } + } + if !found { + res = append(res, s) + } + } + return res +} diff --git a/server/pkg/property/diff_test.go b/server/pkg/property/diff_test.go new file mode 100644 index 000000000..63a6c8629 --- /dev/null +++ b/server/pkg/property/diff_test.go @@ -0,0 +1,619 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestSchemaDiffFrom(t *testing.T) { + ps1 := MustSchemaID("x~1.0.0/a") + ps2 := MustSchemaID("x~1.0.0/b") + + type args struct { + old *Schema + new *Schema + } + tests := []struct { + name string + args args + want SchemaDiff + }{ + { + name: "diff", + args: args{ + old: &Schema{ + id: ps1, + groups: &SchemaGroupList{groups: []*SchemaGroup{ + {id: "a", fields: []*SchemaField{ + {id: "aa", propertyType: ValueTypeString}, // deleted + {id: "ab", propertyType: ValueTypeString}, + {id: "ac", propertyType: ValueTypeString}, + {id: "ad", propertyType: ValueTypeString}, + }}, + }}, + }, + new: &Schema{ + id: ps2, + groups: &SchemaGroupList{groups: []*SchemaGroup{ + {id: "a", fields: []*SchemaField{ + {id: "ab", propertyType: ValueTypeNumber}, // type changed + {id: "ae", propertyType: ValueTypeString}, // added + }}, + {id: "b", list: true, fields: []*SchemaField{ + {id: "ac", propertyType: ValueTypeString}, // moved + {id: "ad", propertyType: ValueTypeNumber}, // moved and type changed + }}, + }}, + }, + }, + want: SchemaDiff{ + From: ps1, + To: ps2, + Deleted: []SchemaDiffDeleted{ + {SchemaGroup: "a", Field: "aa"}, + }, + Moved: []SchemaDiffMoved{ + {From: SchemaFieldPointer{SchemaGroup: "a", Field: "ac"}, To: SchemaFieldPointer{SchemaGroup: "b", Field: "ac"}, ToList: true}, + {From: SchemaFieldPointer{SchemaGroup: "a", Field: 
"ad"}, To: SchemaFieldPointer{SchemaGroup: "b", Field: "ad"}, ToList: true}, + }, + TypeChanged: []SchemaDiffTypeChanged{ + {SchemaFieldPointer: SchemaFieldPointer{SchemaGroup: "a", Field: "ab"}, NewType: ValueTypeNumber}, + {SchemaFieldPointer: SchemaFieldPointer{SchemaGroup: "b", Field: "ad"}, NewType: ValueTypeNumber}, + }, + }, + }, + { + name: "no diff", + args: args{ + old: &Schema{ + id: ps1, + groups: &SchemaGroupList{groups: []*SchemaGroup{ + {id: "a", fields: []*SchemaField{ + {id: "aa", propertyType: ValueTypeNumber}, + }}, + }}, + }, + new: &Schema{ + id: ps2, + groups: &SchemaGroupList{groups: []*SchemaGroup{ + {id: "a", fields: []*SchemaField{ + {id: "aa", propertyType: ValueTypeNumber}, + }}, + {id: "b", list: true, fields: []*SchemaField{ + {id: "ba", propertyType: ValueTypeString}, // added + }}, + }}, + }, + }, + want: SchemaDiff{ + From: ps1, + To: ps2, + }, + }, + { + name: "same schemas", + args: args{ + old: testSchema1, + new: testSchema1, + }, + want: SchemaDiff{ + From: testSchema1.ID(), + To: testSchema1.ID(), + }, + }, + { + name: "nil", + args: args{ + old: nil, + new: nil, + }, + want: SchemaDiff{}, + }, + { + name: "old nil", + args: args{ + old: nil, + new: testSchema1, + }, + want: SchemaDiff{ + To: testSchema1.ID(), + }, + }, + { + name: "new nil", + args: args{ + old: testSchema1, + new: nil, + }, + want: SchemaDiff{ + From: testSchema1.ID(), + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, SchemaDiffFrom(tt.args.old, tt.args.new)) + }) + } +} + +func TestSchemaDiffFromProperty(t *testing.T) { + ps := MustSchemaID("x~1.0.0/a") + + type args struct { + old *Property + new *Schema + } + tests := []struct { + name string + args args + want SchemaDiff + }{ + { + name: "diff", + args: args{ + old: testProperty1, + new: &Schema{ + id: ps, + groups: &SchemaGroupList{groups: []*SchemaGroup{ + {id: testSchemaGroup1.ID(), fields: []*SchemaField{ + {id: 
testSchemaField1.ID(), propertyType: ValueTypeNumber}, // type changed + {id: testSchemaField3.ID(), propertyType: ValueTypeNumber}, // moved and type changed + {id: "xxxx", propertyType: ValueTypeString}, // added + }}, + {id: testSchemaGroup2.ID(), list: true, fields: []*SchemaField{}}, + }}, + }, + }, + want: SchemaDiff{ + From: testProperty1.Schema(), + To: ps, + Deleted: nil, + Moved: []SchemaDiffMoved{ + { + From: SchemaFieldPointer{SchemaGroup: testSchemaGroup2.ID(), Field: testSchemaField3.ID()}, + To: SchemaFieldPointer{SchemaGroup: testSchemaGroup1.ID(), Field: testSchemaField3.ID()}, + }, + }, + TypeChanged: []SchemaDiffTypeChanged{ + {SchemaFieldPointer: SchemaFieldPointer{SchemaGroup: testSchemaGroup1.ID(), Field: testSchemaField1.ID()}, NewType: ValueTypeNumber}, + {SchemaFieldPointer: SchemaFieldPointer{SchemaGroup: testSchemaGroup1.ID(), Field: testSchemaField3.ID()}, NewType: ValueTypeNumber}, + }, + }, + }, + { + name: "no diff", + args: args{ + old: testProperty1, + new: testSchema1, + }, + want: SchemaDiff{ + From: testProperty1.Schema(), + To: testSchema1.ID(), + }, + }, + { + name: "nil", + args: args{ + old: nil, + new: nil, + }, + want: SchemaDiff{}, + }, + { + name: "old nil", + args: args{ + old: nil, + new: testSchema1, + }, + want: SchemaDiff{ + To: testSchema1.ID(), + }, + }, + { + name: "new nil", + args: args{ + old: testProperty1, + new: nil, + }, + want: SchemaDiff{ + From: testProperty1.Schema(), + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, SchemaDiffFromProperty(tt.args.old, tt.args.new)) + }) + } +} + +func TestSchemaDiff_Migrate(t *testing.T) { + itemID := NewItemID() + newSchemaID := MustSchemaID("x~1.0.0/ax") + + tests := []struct { + name string + target *SchemaDiff + args *Property + want bool + wantProperty *Property + only bool + }{ + { + name: "deleted and type changed", + target: &SchemaDiff{ + To: newSchemaID, + Deleted: []SchemaDiffDeleted{ + 
{SchemaGroup: testGroup1.SchemaGroup(), Field: testField1.Field()}, + }, + TypeChanged: []SchemaDiffTypeChanged{ + {SchemaFieldPointer: SchemaFieldPointer{SchemaGroup: testGroupList1.SchemaGroup(), Field: testField2.Field()}, NewType: ValueTypeString}, + }, + }, + args: testProperty1.Clone(), + want: true, + wantProperty: &Property{ + id: testProperty1.ID(), + scene: testProperty1.Scene(), + schema: newSchemaID, + items: []Item{ + &Group{ + itemBase: itemBase{ + ID: testGroup1.ID(), + SchemaGroup: testGroup1.SchemaGroup(), + }, + fields: []*Field{}, // deleted + }, + &GroupList{ + itemBase: itemBase{ + ID: testGroupList1.ID(), + SchemaGroup: testGroupList1.SchemaGroup(), + }, + groups: []*Group{ + { + itemBase: itemBase{ + ID: testGroup2.ID(), + SchemaGroup: testGroup2.SchemaGroup(), + }, + fields: []*Field{ + {field: testField2.Field(), v: NewOptionalValue(ValueTypeString, nil)}, // type changed + }, + }, + }, + }, + }, + }, + }, + { + name: "moved", + target: &SchemaDiff{ + To: newSchemaID, + Moved: []SchemaDiffMoved{ + { + From: SchemaFieldPointer{SchemaGroup: testGroup1.SchemaGroup(), Field: testField1.Field()}, + To: SchemaFieldPointer{SchemaGroup: "x", Field: testField1.Field()}, + }, + }, + }, + args: testProperty1.Clone(), + want: true, + wantProperty: &Property{ + id: testProperty1.ID(), + scene: testProperty1.Scene(), + schema: newSchemaID, + items: []Item{ + &Group{ + itemBase: itemBase{ + ID: testGroup1.ID(), + SchemaGroup: testGroup1.SchemaGroup(), + }, + fields: []*Field{}, // deleted + }, + testGroupList1, + &Group{ + itemBase: itemBase{ + ID: itemID, + SchemaGroup: "x", + }, + fields: []*Field{testField1}, + }, + }, + }, + }, + { + name: "moved and type changed", + target: &SchemaDiff{ + To: newSchemaID, + Moved: []SchemaDiffMoved{ + { + From: SchemaFieldPointer{SchemaGroup: testGroup1.SchemaGroup(), Field: testField1.Field()}, + To: SchemaFieldPointer{SchemaGroup: "x", Field: testField1.Field()}, + }, + }, + TypeChanged: []SchemaDiffTypeChanged{ + 
{SchemaFieldPointer: SchemaFieldPointer{SchemaGroup: "x", Field: testField1.Field()}, NewType: ValueTypeNumber}, + }, + }, + args: testProperty1.Clone(), + want: true, + wantProperty: &Property{ + id: testProperty1.ID(), + scene: testProperty1.Scene(), + schema: newSchemaID, + items: []Item{ + &Group{ + itemBase: itemBase{ + ID: testGroup1.ID(), + SchemaGroup: testGroup1.SchemaGroup(), + }, + fields: []*Field{}, // deleted + }, + testGroupList1, + &Group{ + itemBase: itemBase{ + ID: itemID, + SchemaGroup: "x", + }, + fields: []*Field{ + {field: testField1.Field(), v: NewOptionalValue(ValueTypeNumber, nil)}, + }, + }, + }, + }, + }, + { + name: "group -> list", + target: &SchemaDiff{ + To: newSchemaID, + Moved: []SchemaDiffMoved{ + { + From: SchemaFieldPointer{SchemaGroup: testGroup1.SchemaGroup(), Field: testField1.Field()}, + To: SchemaFieldPointer{SchemaGroup: testGroup2.SchemaGroup(), Field: testField1.Field()}, + }, + }, + }, + args: testProperty1.Clone(), + want: true, + wantProperty: &Property{ + id: testProperty1.ID(), + scene: testProperty1.Scene(), + schema: newSchemaID, + items: []Item{ + &Group{ + itemBase: itemBase{ + ID: testGroup1.ID(), + SchemaGroup: testGroup1.SchemaGroup(), + }, + fields: []*Field{}, // deleted + }, + testGroupList1, + }, + }, + }, + { + name: "group -> list (ToList)", + target: &SchemaDiff{ + To: newSchemaID, + Moved: []SchemaDiffMoved{ + { + From: SchemaFieldPointer{SchemaGroup: testGroup1.SchemaGroup(), Field: testField1.Field()}, + To: SchemaFieldPointer{SchemaGroup: testGroup2.SchemaGroup(), Field: testField1.Field()}, + ToList: true, + }, + }, + }, + args: testProperty1.Clone(), + want: true, + wantProperty: &Property{ + id: testProperty1.ID(), + scene: testProperty1.Scene(), + schema: newSchemaID, + items: []Item{ + &Group{ + itemBase: itemBase{ + ID: testGroup1.ID(), + SchemaGroup: testGroup1.SchemaGroup(), + }, + fields: []*Field{}, // deleted + }, + testGroupList1, + }, + }, + }, + { + name: "list -> group", + target: 
&SchemaDiff{ + To: newSchemaID, + Moved: []SchemaDiffMoved{ + { + From: SchemaFieldPointer{SchemaGroup: testGroup2.SchemaGroup(), Field: testField2.Field()}, + To: SchemaFieldPointer{SchemaGroup: testGroup1.SchemaGroup(), Field: testField2.Field()}, + }, + }, + }, + args: testProperty1.Clone(), + want: true, + wantProperty: &Property{ + id: testProperty1.ID(), + scene: testProperty1.Scene(), + schema: newSchemaID, + items: []Item{ + testGroup1, + &GroupList{ + itemBase: itemBase{ + ID: testGroupList1.ID(), + SchemaGroup: testGroupList1.SchemaGroup(), + }, + groups: []*Group{ + { + itemBase: itemBase{ + ID: testGroup2.ID(), + SchemaGroup: testGroup2.SchemaGroup(), + }, + fields: []*Field{}, // deleted + }, + }, + }, + }, + }, + }, + { + name: "empty", + target: &SchemaDiff{}, + args: testProperty1, + want: false, + wantProperty: testProperty1, + }, + { + name: "nil property", + target: &SchemaDiff{ + To: newSchemaID, + Deleted: []SchemaDiffDeleted{{SchemaGroup: testGroup1.SchemaGroup(), Field: testField1.Field()}}, + }, + args: nil, + want: false, + wantProperty: nil, + }, + { + name: "nil", + target: nil, + args: nil, + want: false, + wantProperty: nil, + }, + } + + only := false + for _, tt := range tests { + if tt.only { + only = true + break + } + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + // t.Parallel() // Cannot run tests in parallel due to mocking NewItemID + if only && !tt.only { + t.SkipNow() + } + defer mockNewItemID(itemID)() + assert.Equal(t, tt.want, tt.target.Migrate(tt.args)) + assert.Equal(t, tt.wantProperty, tt.args) + }) + } +} + +func TestSchemaDiff_IsEmpty(t *testing.T) { + tests := []struct { + name string + target *SchemaDiff + want bool + }{ + { + name: "present", + target: &SchemaDiff{ + Deleted: []SchemaDiffDeleted{{SchemaGroup: "", Field: ""}}, + }, + want: false, + }, + { + name: "empty", + target: &SchemaDiff{}, + want: true, + }, + { + name: "nil", + target: nil, + want: true, + }, + } + + for _, 
tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.IsEmpty()) + }) + } +} + +func TestSchemaDiff_IsIDChanged(t *testing.T) { + tests := []struct { + name string + target *SchemaDiff + want bool + }{ + { + name: "changed1", + target: &SchemaDiff{ + From: id.MustPropertySchemaID("a~1.0.0/a"), + To: id.MustPropertySchemaID("a~1.0.1/a"), + }, + want: true, + }, + { + name: "changed2", + target: &SchemaDiff{ + From: id.MustPropertySchemaID("a~1.0.0/a"), + }, + want: true, + }, + { + name: "changed3", + target: &SchemaDiff{ + To: id.MustPropertySchemaID("a~1.0.0/a"), + }, + want: true, + }, + { + name: "unchanged1", + target: &SchemaDiff{ + From: id.MustPropertySchemaID("a~1.0.0/a"), + To: id.MustPropertySchemaID("a~1.0.0/a"), + }, + want: false, + }, + { + name: "empty", + target: &SchemaDiff{}, + want: false, + }, + { + name: "nil", + target: nil, + want: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.IsIDChanged()) + }) + } +} + +func TestSchemaDiffList_FindByFrom(t *testing.T) { + p1 := MustSchemaID("a~1.0.0/a") + p2 := MustSchemaID("a~1.0.0/b") + + assert.Equal(t, &SchemaDiff{From: p1}, SchemaDiffList{{From: p1}}.FindByFrom(p1)) + assert.Nil(t, SchemaDiffList{}.FindByFrom(p2)) + assert.Nil(t, SchemaDiffList{}.FindByFrom(p1)) + assert.Nil(t, SchemaDiffList(nil).FindByFrom(p1)) +} + +func TestSchemaDiffList_FromSchemas(t *testing.T) { + p1 := MustSchemaID("a~1.0.0/a") + p2 := MustSchemaID("a~1.0.0/b") + + assert.Equal(t, []SchemaID{p1, p2}, SchemaDiffList{{From: p1}, {From: p2}, {From: p2}}.FromSchemas()) + assert.Nil(t, SchemaDiffList{}.FromSchemas()) + assert.Nil(t, SchemaDiffList(nil).FromSchemas()) +} diff --git a/server/pkg/property/field.go b/server/pkg/property/field.go new file mode 100644 index 000000000..32617a3ed --- /dev/null +++ b/server/pkg/property/field.go @@ -0,0 +1,222 @@ +package property + +import ( + "context" + "errors" + + 
"github.com/reearth/reearth-backend/pkg/dataset" +) + +var ( + ErrInvalidPropertyValue = errors.New("invalid property value") + ErrCannotLinkDataset = errors.New("cannot link dataset") + ErrInvalidPropertyType = errors.New("invalid property type") + ErrInvalidPropertyField = errors.New("invalid property field") +) + +type Field struct { + field FieldID + links *Links + v *OptionalValue +} + +func (p *Field) Clone() *Field { + if p == nil { + return nil + } + return &Field{ + field: p.field, + links: p.links.Clone(), + v: p.v.Clone(), + } +} + +func (p *Field) Field() FieldID { + return p.field +} + +func (p *Field) FieldRef() *FieldID { + if p == nil { + return nil + } + return p.field.Ref() +} + +func (p *Field) Links() *Links { + if p == nil { + return nil + } + return p.links +} + +func (p *Field) Type() ValueType { + if p == nil { + return ValueTypeUnknown + } + return p.v.Type() +} + +func (p *Field) Value() *Value { + if p == nil { + return nil + } + return p.v.Value() +} + +func (p *Field) TypeAndValue() *OptionalValue { + if p == nil { + return nil + } + return p.v +} + +func (p *Field) ActualValue(ds *dataset.Dataset) *ValueAndDatasetValue { + if p == nil { + return nil + } + + var dv *dataset.Value + if p.links != nil { + if l := p.links.Last(); l != nil { + d := l.Dataset() + if d != nil && ds.ID() == *d && l.DatasetSchemaField() != nil { + dv = ds.Field(*l.DatasetSchemaField()).Value() + } else { + return nil + } + } else { + return nil + } + } + return NewValueAndDatasetValue(p.Type(), dv, p.Value()) +} + +func (p *Field) Datasets() []DatasetID { + if p == nil { + return nil + } + + res := []DatasetID{} + if p.Links().IsLinkedFully() { + dsid := p.Links().Last().Dataset() + if dsid != nil { + res = append(res, *dsid) + } + } + + return res +} + +func (p *Field) IsDatasetLinked(s DatasetSchemaID, i DatasetID) bool { + return p.Links().HasDatasetSchemaAndDataset(s, i) +} + +func (p *Field) Update(value *Value, field *SchemaField) error { + if p == nil { 
+ return nil + } + if field == nil || p.field != field.ID() || !field.Validate(p.v) { + return ErrInvalidPropertyValue + } + p.v.SetValue(value) + return nil +} + +func (p *Field) UpdateUnsafe(value *Value) { + if p == nil { + return + } + p.v.SetValue(value) +} + +func (p *Field) Cast(t ValueType) bool { + if p == nil || t == ValueTypeUnknown || p.Type() == ValueTypeUnknown || p.Type() == t { + return false + } + p.v = p.v.Cast(t) + p.Unlink() + return true +} + +func (p *Field) Link(links *Links) { + if p == nil { + return + } + p.links = links.Clone() +} + +func (p *Field) Unlink() { + p.Link(nil) +} + +func (p *Field) UpdateField(field FieldID) { + if p == nil { + return + } + p.field = field +} + +func (p *Field) IsEmpty() bool { + return p == nil || p.Value().IsEmpty() && p.Links().IsEmpty() +} + +func (p *Field) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset.Loader) bool { + if p == nil || dl == nil || newSchema == nil { + return false + } + + fid := p.Field() + schemaField := newSchema.Groups().Field(fid) + + // If field is not found in new schema, this field should be removed + invalid := schemaField == nil + + // if value is not compatible for type, value will be cleared + if !schemaField.Validate(p.v) { + p.UpdateUnsafe(nil) + } + + // If linked dataset is not compatible for type, it will be unlinked + l := p.Links() + if dl != nil && l.IsLinkedFully() { + if dsid, dsfid := l.Last().Dataset(), l.Last().DatasetSchemaField(); dsid != nil && dsfid != nil { + dss, _ := dl(ctx, *dsid) + if dsf := dss[0].Field(*dsfid); dsf != nil { + if schemaField.Type() != ValueType(dsf.Type()) { + p.Unlink() + } + } + } + } + + return !invalid +} + +func (p *Field) MigrateDataset(q DatasetMigrationParam) { + if p == nil { + return + } + link := p.Links() + link.Replace(q.OldDatasetSchemaMap, q.OldDatasetMap, q.DatasetFieldIDMap) + if !link.Validate(q.NewDatasetSchemaMap, q.NewDatasetMap) { + p.Unlink() + } +} + +func (f *Field) GuessSchema() *SchemaField { 
+ if f == nil { + return nil + } + if f, err := NewSchemaField().ID(f.Field()).Type(f.Type()).Build(); err == nil { + return f + } + return nil +} + +type DatasetMigrationParam struct { + OldDatasetSchemaMap map[DatasetSchemaID]DatasetSchemaID + OldDatasetMap map[DatasetID]DatasetID + DatasetFieldIDMap map[DatasetFieldID]DatasetFieldID + NewDatasetSchemaMap map[DatasetSchemaID]*dataset.Schema + NewDatasetMap map[DatasetID]*dataset.Dataset +} diff --git a/server/pkg/property/field_builder.go b/server/pkg/property/field_builder.go new file mode 100644 index 000000000..0f9593516 --- /dev/null +++ b/server/pkg/property/field_builder.go @@ -0,0 +1,64 @@ +package property + +import "fmt" + +type FieldBuilder struct { + p *Field +} + +func NewField(field FieldID) *FieldBuilder { + return &FieldBuilder{ + p: &Field{ + field: field, + }, + } +} + +func FieldFrom(sf *SchemaField) *FieldBuilder { + if sf == nil { + return NewField("") + } + return &FieldBuilder{ + p: &Field{ + field: sf.ID(), + v: NewOptionalValue(sf.Type(), nil), + }, + } +} + +func (b *FieldBuilder) Build() *Field { + if b.p.field == "" || b.p.v == nil { + return nil + } + return b.p +} + +func (b *FieldBuilder) MustBuild() *Field { + f := b.Build() + if f == nil { + panic(fmt.Sprintf("field ID or type is invalid: id=%s, type=%s", b.p.field, b.p.v.Type())) + } + return f +} + +func (b *FieldBuilder) Field(field FieldID) *FieldBuilder { + b.p.field = field + return b +} + +func (b *FieldBuilder) Value(v *OptionalValue) *FieldBuilder { + b.p.v = v.Clone() + return b +} + +func (b *FieldBuilder) Type(t ValueType) *FieldBuilder { + if b.p.v.Type() != t { + b.p.v = NewOptionalValue(t, nil) + } + return b +} + +func (b *FieldBuilder) Links(l *Links) *FieldBuilder { + b.p.links = l.Clone() + return b +} diff --git a/server/pkg/property/field_builder_test.go b/server/pkg/property/field_builder_test.go new file mode 100644 index 000000000..f6041dfb9 --- /dev/null +++ b/server/pkg/property/field_builder_test.go @@ 
-0,0 +1,120 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestFieldBuilder_Value(t *testing.T) { + p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + b := FieldFrom(p).Value(OptionalValueFrom(v)).Build() + assert.Equal(t, v, b.Value()) +} + +func TestFieldBuilder_Link(t *testing.T) { + p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() + l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) + ls := NewLinks([]*Link{l}) + b := FieldFrom(p).Links(ls).Build() + assert.Equal(t, ls, b.Links()) +} + +func TestFieldBuilder_Build(t *testing.T) { + l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) + + type args struct { + Links *Links + Field FieldID + Value *OptionalValue + } + + tests := []struct { + Name string + Args args + Expected *Field + }{ + { + Name: "fail invalid property id", + }, + { + Name: "success", + Args: args{ + Field: "A", + Links: NewLinks([]*Link{l}), + Value: OptionalValueFrom(ValueTypeString.ValueFrom("vvv")), + }, + Expected: &Field{ + field: "A", + links: NewLinks([]*Link{l}), + v: OptionalValueFrom(ValueTypeString.ValueFrom("vvv")), + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := NewField(tt.Args.Field). + Value(tt.Args.Value). + Links(tt.Args.Links). 
+ Build() + assert.Equal(t, tt.Expected, res) + }) + } +} + +func TestFieldBuilder_MustBuild(t *testing.T) { + l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) + + type args struct { + Links *Links + Field FieldID + Value *OptionalValue + } + + tests := []struct { + Name string + Args args + Expected *Field + }{ + { + Name: "fail invalid property id", + }, + { + Name: "success", + Args: args{ + Field: "A", + Links: NewLinks([]*Link{l}), + Value: OptionalValueFrom(ValueTypeString.ValueFrom("vvv")), + }, + Expected: &Field{ + field: "A", + links: NewLinks([]*Link{l}), + v: OptionalValueFrom(ValueTypeString.ValueFrom("vvv")), + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + + build := func() *Field { + return NewField(tt.Args.Field). + Value(tt.Args.Value). + Links(tt.Args.Links). + MustBuild() + } + + if tt.Expected == nil { + assert.Panics(t, func() { _ = build() }) + } else { + assert.Equal(t, tt.Expected, build()) + } + }) + } +} diff --git a/server/pkg/property/field_test.go b/server/pkg/property/field_test.go new file mode 100644 index 000000000..24d0c7386 --- /dev/null +++ b/server/pkg/property/field_test.go @@ -0,0 +1,333 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/stretchr/testify/assert" +) + +var ( + testField1 = NewField(testSchemaField1.ID()).Value(OptionalValueFrom(ValueTypeString.ValueFrom("aaa"))).MustBuild() + testField2 = NewField(testSchemaField3.ID()).Value(NewOptionalValue(ValueTypeLatLng, nil)).MustBuild() +) + +func TestField_ActualValue(t *testing.T) { + p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() + dsid := NewDatasetID() + dssid := NewDatasetSchemaID() + dssfid := NewDatasetFieldID() + ls := NewLinks([]*Link{NewLink(dsid, dssid, dssfid)}) + + tests := []struct { + Name string + Field *Field + DS *dataset.Dataset + Expected *ValueAndDatasetValue + }{ + { + Name: "nil links", 
+ Field: FieldFrom(p). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))). + MustBuild(), + Expected: NewValueAndDatasetValue(ValueTypeString, nil, ValueTypeString.ValueFrom("vvv")), + }, + { + Name: "empty link", + Field: FieldFrom(p). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))). + Links(&Links{}). + MustBuild(), + Expected: nil, + }, + { + Name: "dataset value", + Field: FieldFrom(p). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))). + Links(ls). + MustBuild(), + DS: dataset.New(). + ID(dsid).Schema(dssid). + Fields([]*dataset.Field{ + dataset.NewField(dssfid, dataset.ValueTypeString.ValueFrom("xxx"), "")}, + ). + MustBuild(), + Expected: NewValueAndDatasetValue(ValueTypeString, dataset.ValueTypeString.ValueFrom("xxx"), ValueTypeString.ValueFrom("vvv")), + }, + { + Name: "dataset value missing", + Field: NewField("a").Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).Links(ls).Build(), + DS: dataset.New().ID(dsid).Schema(dssid).MustBuild(), + Expected: NewValueAndDatasetValue(ValueTypeString, nil, ValueTypeString.ValueFrom("vvv")), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.Field.ActualValue(tc.DS) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestField_Datasets(t *testing.T) { + p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() + dsid := NewDatasetID() + dssid := NewDatasetSchemaID() + dssfid := NewDatasetFieldID() + l := NewLink(dsid, dssid, dssfid) + ls := NewLinks([]*Link{l}) + + tests := []struct { + Name string + Field *Field + Expected []DatasetID + }{ + { + Name: "list of one datasets", + Field: FieldFrom(p). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))). + Links(ls). 
+ MustBuild(), + Expected: []DatasetID{dsid}, + }, + { + Name: "nil field", + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.Field.Datasets() + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestField_Clone(t *testing.T) { + l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) + ls := NewLinks([]*Link{l}) + b := NewField("a").Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).Links(ls).Build() + + tests := []struct { + name string + target *Field + want *Field + }{ + { + name: "ok", + target: b, + want: b, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + r := b.Clone() + assert.Equal(t, b, r) + if tt.want != nil { + assert.NotSame(t, b, r) + } + }) + } +} + +func TestField_IsEmpty(t *testing.T) { + tests := []struct { + name string + target *Field + want bool + }{ + { + name: "empty", + target: &Field{}, + want: true, + }, + { + name: "empty value", + target: NewField("a").Value(NewOptionalValue(ValueTypeString, nil)).Build(), + want: true, + }, + { + name: "not empty", + target: NewField("a").Value(OptionalValueFrom(ValueTypeString.ValueFrom("x"))).Build(), + want: false, + }, + { + name: "nil", + target: nil, + want: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.IsEmpty()) + }) + } +} + +func TestField_Link(t *testing.T) { + did := NewDatasetID() + dsid := NewDatasetSchemaID() + dfid := NewDatasetFieldID() + l := NewLinks([]*Link{NewLink(did, dsid, dfid)}) + + tests := []struct { + name string + target *Field + args *Links + }{ + { + name: "link", + target: testField1.Clone(), + args: l, + }, + { + name: "unlink", + target: NewField("a").Value(NewOptionalValue(ValueTypeString, nil)).Links(l).Build(), + args: nil, + }, + { + name: "empty", + 
target: &Field{}, + args: nil, + }, + { + name: "nil", + target: nil, + args: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + tt.target.Link(tt.args) + if tt.target != nil { + assert.Equal(t, tt.args, tt.target.links) + } + }) + } +} + +func TestField_Update(t *testing.T) { + p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() + b := FieldFrom(p). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))). + MustBuild() + v := ValueTypeString.ValueFrom("xxx") + b.UpdateUnsafe(v) + assert.Equal(t, v, b.Value()) +} + +func TestField_Cast(t *testing.T) { + dgp := NewLinks([]*Link{ + NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()), + }) + + type args struct { + t ValueType + } + tests := []struct { + name string + target *Field + args args + want *Field + }{ + { + name: "ok", + target: &Field{ + field: FieldID("foobar"), + v: OptionalValueFrom(ValueTypeString.ValueFrom("-123")), + links: dgp.Clone(), + }, + args: args{t: ValueTypeNumber}, + want: &Field{ + field: FieldID("foobar"), + v: OptionalValueFrom(ValueTypeNumber.ValueFrom(-123)), + }, + }, + { + name: "failed", + target: &Field{ + field: FieldID("foobar"), + v: OptionalValueFrom(ValueTypeString.ValueFrom("foo")), + links: dgp.Clone(), + }, + args: args{t: ValueTypeLatLng}, + want: &Field{ + field: FieldID("foobar"), + v: NewOptionalValue(ValueTypeLatLng, nil), + }, + }, + { + name: "empty", + target: &Field{}, + args: args{t: ValueTypeNumber}, + want: &Field{}, + }, + { + name: "nil", + target: nil, + args: args{t: ValueTypeNumber}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + tt.target.Cast(tt.args.t) + assert.Equal(t, tt.want, tt.target) + }) + } +} + +func TestField_GuessSchema(t *testing.T) { + tests := []struct { + name string + target *Field + want *SchemaField + }{ + { + name: "ok", + target: &Field{field: "a", v: 
NewOptionalValue(ValueTypeLatLng, nil)}, + want: &SchemaField{id: "a", propertyType: ValueTypeLatLng}, + }, + { + name: "empty", + target: &Field{}, + want: nil, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.GuessSchema()) + }) + } +} diff --git a/server/pkg/property/group.go b/server/pkg/property/group.go new file mode 100644 index 000000000..b2e6bf2d3 --- /dev/null +++ b/server/pkg/property/group.go @@ -0,0 +1,328 @@ +package property + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/pkg/dataset" +) + +// Group represents a group of property +type Group struct { + itemBase + fields []*Field +} + +// Group implements Item interface +var _ Item = &Group{} + +func (g *Group) ID() ItemID { + return g.itemBase.ID +} + +func (g *Group) IDRef() *ItemID { + if g == nil { + return nil + } + return g.itemBase.ID.Ref() +} + +func (g *Group) SchemaGroup() SchemaGroupID { + return g.itemBase.SchemaGroup +} + +func (g *Group) SchemaGroupRef() *SchemaGroupID { + if g == nil { + return nil + } + return g.itemBase.SchemaGroup.Ref() +} + +func (g *Group) HasLinkedField() bool { + if g == nil { + return false + } + for _, f := range g.fields { + if f.Links().IsLinked() { + return true + } + } + return false +} + +func (g *Group) Datasets() []DatasetID { + if g == nil { + return nil + } + res := []DatasetID{} + + for _, f := range g.fields { + res = append(res, f.Datasets()...) 
+ } + + return res +} + +func (g *Group) FieldsByLinkedDataset(s DatasetSchemaID, i DatasetID) []*Field { + if g == nil { + return nil + } + res := []*Field{} + for _, f := range g.fields { + if f.Links().HasSchemaAndDataset(s, i) { + res = append(res, f) + } + } + return res +} + +func (g *Group) IsDatasetLinked(s DatasetSchemaID, i DatasetID) bool { + if g == nil { + return false + } + for _, f := range g.fields { + if f.IsDatasetLinked(s, i) { + return true + } + } + return false +} + +func (g *Group) IsEmpty() bool { + if g != nil { + for _, f := range g.fields { + if !f.IsEmpty() { + return false + } + } + } + return true +} + +func (g *Group) Prune() (res bool) { + if g == nil { + return + } + for _, f := range g.fields { + if f.IsEmpty() { + if g.RemoveField(f.Field()) { + res = true + } + } + } + return +} + +// TODO: group migration +func (g *Group) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset.Loader) { + if g == nil || dl == nil { + return + } + + for _, f := range g.fields { + if !f.MigrateSchema(ctx, newSchema, dl) { + g.RemoveField(f.Field()) + } + } + + g.Prune() +} + +func (g *Group) GetOrCreateField(ps *Schema, fid FieldID) (*Field, bool) { + if g == nil || ps == nil { + return nil, false + } + psg := ps.Groups().Group(g.SchemaGroup()) + if psg == nil { + return nil, false + } + + psf := psg.Field(fid) + if psf == nil { + return nil, false + } + + psfid := psf.ID() + field := g.Field(psfid) + if field != nil { + return field, false + } + + // if the field does not exist, create it here + field = FieldFrom(psf).Type(psf.Type()).Build() + if field == nil { + return nil, false + } + + g.AddFields(field) + return field, true +} + +func (g *Group) AddFields(fields ...*Field) { + if g == nil { + return + } + for _, f := range fields { + _ = g.RemoveField(f.Field()) + g.fields = append(g.fields, f) + } +} + +func (g *Group) RemoveField(fid FieldID) (res bool) { + if g == nil { + return false + } + for i, f := range g.fields { + if 
f.Field() == fid { + g.fields = append(g.fields[:i], g.fields[i+1:]...) + return true + } + } + return false +} + +func (g *Group) FieldIDs() []FieldID { + if g == nil { + return nil + } + fields := make([]FieldID, 0, len(g.fields)) + for _, f := range g.fields { + fields = append(fields, f.Field()) + } + return fields +} + +// Field returns a field whose id is specified +func (g *Group) Field(fid FieldID) *Field { + if g == nil { + return nil + } + for _, f := range g.fields { + if f.Field() == fid { + return f + } + } + return nil +} + +func (g *Group) MigrateDataset(q DatasetMigrationParam) { + if g == nil { + return + } + for _, f := range g.fields { + f.MigrateDataset(q) + } +} + +func (g *Group) RepresentativeField(schema *Schema) *Field { + if g == nil || schema == nil { + return nil + } + if psg := schema.Groups().Group(g.itemBase.SchemaGroup); psg != nil { + if representativeField := psg.RepresentativeFieldID(); representativeField != nil { + if f, _ := g.GetOrCreateField(schema, *representativeField); f != nil { + return f + } + } + } + return nil +} + +func (p *Group) ValidateSchema(ps *SchemaGroup) error { + if p == nil { + return nil + } + if ps == nil { + return errors.New("invalid schema") + } + if p.SchemaGroup() != ps.ID() { + return errors.New("invalid schema group id") + } + + for _, i := range p.fields { + f := ps.Field(i.Field()) + if f.Type() != i.Type() { + return errors.New("invalid field type") + } + } + + return nil +} + +func (p *Group) Clone() *Group { + if p == nil { + return nil + } + fields := make([]*Field, 0, len(p.fields)) + for _, f := range p.fields { + fields = append(fields, f.Clone()) + } + return &Group{ + fields: fields, + itemBase: p.itemBase, + } +} + +func (p *Group) CloneItem() Item { + return p.Clone() +} + +func (g *Group) Fields(p *Pointer) []*Field { + if g == nil || len(g.fields) == 0 || (p != nil && !p.TestItem(g.SchemaGroup(), g.ID())) { + return nil + } + + if fid, ok := p.Field(); ok { + if f := g.Field(fid); f 
!= nil { + return []*Field{f} + } + return nil + } + + return append(g.fields[:0:0], g.fields...) +} + +func (g *Group) RemoveFields(ptr *Pointer) (res bool) { + if g == nil || ptr == nil { + return false + } + if f, ok := ptr.FieldIfItemIs(g.SchemaGroup(), g.ID()); ok { + if g.RemoveField(f) { + res = true + } + } + return +} + +func (p *Group) GroupAndFields(ptr *Pointer) []GroupAndField { + if p == nil || len(p.fields) == 0 { + return nil + } + res := []GroupAndField{} + for _, f := range p.fields { + if ptr == nil || ptr.Test(p.SchemaGroup(), p.ID(), f.Field()) { + res = append(res, GroupAndField{ + Group: p, + Field: f, + }) + } + } + return res +} + +func (g *Group) GuessSchema() *SchemaGroup { + if g == nil { + return nil + } + + fields := make([]*SchemaField, 0, len(g.fields)) + for _, f := range g.fields { + if sf := f.GuessSchema(); sf != nil { + fields = append(fields, sf) + } + } + + // TODO: error handling + sg, _ := NewSchemaGroup().ID(g.SchemaGroup()).Fields(fields).Build() + return sg +} diff --git a/server/pkg/property/group_builder.go b/server/pkg/property/group_builder.go new file mode 100644 index 000000000..6ec41ced7 --- /dev/null +++ b/server/pkg/property/group_builder.go @@ -0,0 +1,75 @@ +package property + +type GroupBuilder struct { + p *Group +} + +func NewGroup() *GroupBuilder { + return &GroupBuilder{ + p: &Group{}, + } +} + +func InitGroupFrom(g *SchemaGroup) *Group { + if g == nil { + return nil + } + g2, _ := NewGroup().NewID().SchemaGroup(g.ID()).Build() + return g2 +} + +func (b *GroupBuilder) Build() (*Group, error) { + if b.p.itemBase.ID.IsNil() { + return nil, ErrInvalidID + } + if b.p.itemBase.SchemaGroup == "" { + return nil, ErrInvalidID + } + return b.p, nil +} + +func (b *GroupBuilder) MustBuild() *Group { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +func (b *GroupBuilder) base(base itemBase) *GroupBuilder { + b.p.itemBase = base + return b +} + +func (b *GroupBuilder) ID(id ItemID) *GroupBuilder 
{ + b.p.itemBase.ID = id + return b +} + +func (b *GroupBuilder) NewID() *GroupBuilder { + nid := NewItemID + b.p.itemBase.ID = nid() + return b +} + +func (b *GroupBuilder) SchemaGroup(g SchemaGroupID) *GroupBuilder { + b.p.itemBase.SchemaGroup = g + return b +} + +func (b *GroupBuilder) Fields(fields []*Field) *GroupBuilder { + var newFields []*Field + ids := map[FieldID]struct{}{} + for _, f := range fields { + if f == nil { + continue + } + if _, ok := ids[f.Field()]; ok { + continue + } + ids[f.Field()] = struct{}{} + newFields = append(newFields, f) + } + b.p.fields = newFields + return b +} diff --git a/server/pkg/property/group_builder_test.go b/server/pkg/property/group_builder_test.go new file mode 100644 index 000000000..8dd6dc3d4 --- /dev/null +++ b/server/pkg/property/group_builder_test.go @@ -0,0 +1,139 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGroupBuilder_Build(t *testing.T) { + iid := NewItemID() + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() + + type args struct { + ID ItemID + SchemaGroup SchemaGroupID + Fields []*Field + } + + tests := []struct { + Name string + Args args + Expected *Group + Err error + }{ + { + Name: "fail invalid id", + Err: ErrInvalidID, + }, + { + Name: "success", + Args: args{ + ID: iid, + SchemaGroup: "a", + Fields: []*Field{f}, + }, + Expected: &Group{ + itemBase: itemBase{ + ID: iid, + SchemaGroup: "a", + }, + fields: []*Field{f}, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := NewGroup(). + ID(tt.Args.ID). + Fields(tt.Args.Fields). + SchemaGroup(tt.Args.SchemaGroup). 
+ Build() + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) + } else { + assert.Equal(t, tt.Err, err) + } + }) + } +} + +func TestGroupBuilder_MustBuild(t *testing.T) { + iid := NewItemID() + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() + + type args struct { + ID ItemID + SchemaGroup SchemaGroupID + Fields []*Field + } + + tests := []struct { + Name string + Args args + Expected *Group + Err error + }{ + { + Name: "fail invalid id", + Err: ErrInvalidID, + }, + { + Name: "success", + Args: args{ + ID: iid, + SchemaGroup: "a", + Fields: []*Field{f}, + }, + Expected: &Group{ + itemBase: itemBase{ + ID: iid, + SchemaGroup: "a", + }, + fields: []*Field{f}, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + + build := func() *Group { + t.Helper() + return NewGroup(). + ID(tt.Args.ID). + Fields(tt.Args.Fields). + SchemaGroup(tt.Args.SchemaGroup). 
+ MustBuild() + } + + if tt.Err != nil { + assert.PanicsWithValue(t, tt.Err, func() { _ = build() }) + } else { + assert.Equal(t, tt.Expected, build()) + } + }) + } +} + +func TestGroupBuilder_NewID(t *testing.T) { + g := NewGroup().NewID().SchemaGroup("x").MustBuild() + assert.False(t, g.ID().IsEmpty()) +} + +func TestGroupBuilder_InitGroupFrom(t *testing.T) { + var sg *SchemaGroup + assert.Nil(t, InitGroupFrom(sg)) + sg = NewSchemaGroup().ID("a").MustBuild() + g := InitGroupFrom(sg) + assert.Equal(t, sg.ID(), g.SchemaGroup()) +} diff --git a/server/pkg/property/group_list.go b/server/pkg/property/group_list.go new file mode 100644 index 000000000..a07083b74 --- /dev/null +++ b/server/pkg/property/group_list.go @@ -0,0 +1,473 @@ +package property + +import ( + "context" + "errors" + "fmt" + + "github.com/reearth/reearth-backend/pkg/dataset" +) + +type GroupList struct { + itemBase + groups []*Group +} + +// List implements Item interface +var _ Item = &GroupList{} + +// ID returns id +func (g *GroupList) ID() ItemID { + if g == nil { + return ItemID{} + } + return g.itemBase.ID +} + +// IDRef returns a reference of id +func (g *GroupList) IDRef() *ItemID { + if g == nil { + return nil + } + return g.itemBase.ID.Ref() +} + +// SchemaGroup returns id of schema group +func (g *GroupList) SchemaGroup() SchemaGroupID { + if g == nil { + return SchemaGroupID("") + } + return g.itemBase.SchemaGroup +} + +func (g *GroupList) SchemaGroupRef() *SchemaGroupID { + if g == nil { + return nil + } + return g.itemBase.SchemaGroup.Ref() +} + +func (g *GroupList) HasLinkedField() bool { + if g == nil { + return false + } + for _, f := range g.groups { + if f.HasLinkedField() { + return true + } + } + return false +} + +func (g *GroupList) Datasets() []DatasetID { + if g == nil { + return nil + } + res := []DatasetID{} + + for _, f := range g.groups { + res = append(res, f.Datasets()...) 
+ } + + return res +} + +func (g *GroupList) FieldsByLinkedDataset(s DatasetSchemaID, i DatasetID) []*Field { + if g == nil { + return nil + } + res := []*Field{} + for _, g := range g.groups { + res = append(res, g.FieldsByLinkedDataset(s, i)...) + } + return res +} + +func (g *GroupList) IsDatasetLinked(s DatasetSchemaID, i DatasetID) bool { + if g == nil { + return false + } + for _, d := range g.groups { + if d.IsDatasetLinked(s, i) { + return true + } + } + return false +} + +func (g *GroupList) IsEmpty() bool { + return g != nil && (g.groups == nil || len(g.groups) == 0) +} + +func (g *GroupList) Prune() (res bool) { + if g == nil { + return + } + for _, f := range g.groups { + if f.Prune() { + res = true + } + } + return +} + +func (g *GroupList) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset.Loader) { + if g == nil || dl == nil { + return + } + + for _, f := range g.groups { + f.MigrateSchema(ctx, newSchema, dl) + } + + g.Prune() +} + +// Groups returns a slice of groups +func (g *GroupList) Groups() []*Group { + if g == nil { + return nil + } + return append([]*Group{}, g.groups...) 
+} + +// Group returns a group whose id is specified +func (g *GroupList) Group(gid ItemID) *Group { + if g == nil { + return nil + } + for _, g := range g.groups { + if g.ID() == gid { + return g + } + } + return nil +} + +func (g *GroupList) GroupByPointer(ptr *Pointer) *Group { + if g == nil { + return nil + } + gid, ok := ptr.Item() + if !ok { + return nil + } + return g.Group(gid) +} + +func (p *GroupList) Clone() *GroupList { + if p == nil { + return nil + } + groups := make([]*Group, 0, len(p.groups)) + for _, g := range p.groups { + groups = append(groups, g.Clone()) + } + return &GroupList{ + groups: groups, + itemBase: p.itemBase, + } +} + +func (p *GroupList) CloneItem() Item { + return p.Clone() +} + +func (g *GroupList) Fields(ptr *Pointer) []*Field { + if g == nil || len(g.groups) == 0 || (ptr != nil && !ptr.TestSchemaGroup(g.SchemaGroup())) { + return nil + } + + if pi, ok := ptr.Item(); ok && g.ID() != pi { + return g.Group(pi).Fields(ptr) + } + + if fid, ok := ptr.Field(); ok { + ptr = PointFieldOnly(fid) + } + + var fields []*Field + for _, g := range g.groups { + if f := g.Fields(ptr); len(f) > 0 { + fields = append(fields, f...) 
+ } + } + return fields +} + +func (g *GroupList) RemoveFields(ptr *Pointer) (res bool) { + if g == nil { + return + } + + if i, ok := ptr.Item(); ok && g.ID() != i { + return g.GroupByPointer(ptr).RemoveFields(ptr) + } + + if i, ok := ptr.ItemBySchemaGroup(); ok && g.SchemaGroup() != i { + return g.GroupByPointer(ptr).RemoveFields(ptr) + } + + if fid, ok := ptr.Field(); ok { + for _, g := range g.groups { + if g.RemoveField(fid) { + res = true + } + } + } + + return +} + +func (p *GroupList) GroupAndFields(ptr *Pointer) []GroupAndField { + if p == nil || len(p.groups) == 0 { + return nil + } + res := []GroupAndField{} + for _, g := range p.groups { + if ptr == nil || ptr.TestItem(g.SchemaGroup(), g.ID()) { + for _, r := range g.GroupAndFields(ptr) { + res = append(res, GroupAndField{ + ParentGroup: p, + Group: r.Group, + Field: r.Field, + }) + } + } + } + return res +} + +func (g *GroupList) GuessSchema() *SchemaGroup { + if g == nil { + return nil + } + + fieldm := map[FieldID]struct{}{} + fields := []*SchemaField{} + + for _, g := range g.groups { + if gsg := g.GuessSchema(); gsg != nil { + for _, f := range gsg.Fields() { + if _, ok := fieldm[f.ID()]; ok { + continue + } + fields = append(fields, f) + fieldm[f.ID()] = struct{}{} + } + } + } + + // TODO: error handling + sg, _ := NewSchemaGroup().ID(g.SchemaGroup()).IsList(true).Fields(fields).Build() + return sg +} + +// GroupAt returns a group whose index is specified +func (g *GroupList) GroupAt(i int) *Group { + if g == nil || i < 0 || i > len(g.groups)-1 { + return nil + } + return g.groups[i] +} + +func (g *GroupList) Has(i ItemID) bool { + if g == nil { + return false + } + for _, gg := range g.groups { + if gg.ID() == i { + return true + } + } + return false +} + +func (g *GroupList) Count() int { + if g == nil { + return 0 + } + return len(g.groups) +} + +func (g *GroupList) Add(gg *Group, index int) { + if g == nil || g.Has(gg.ID()) { + return + } + + le := len(g.groups) + if index < 0 || le <= index { 
+ g.groups = append(g.groups, gg) + } else { + g.groups = append(g.groups[:index], append([]*Group{gg}, g.groups[index:]...)...) + } +} + +func (g *GroupList) AddOrMove(gg *Group, index int) { + if g == nil { + return + } + + le := len(g.groups) + if index < 0 || le <= index { + index = le + } + + gid := gg.ID() + if g.Has(gid) { + g.Move(gid, index) + return + } + g.groups = append(g.groups[:index], append([]*Group{gg}, g.groups[index:]...)...) +} + +func (g *GroupList) Move(id ItemID, toIndex int) { + if g == nil { + return + } + + for fromIndex, gg := range g.groups { + if gg.ID() == id { + g.MoveAt(fromIndex, toIndex) + return + } + } +} + +func (g *GroupList) MoveAt(fromIndex int, toIndex int) { + if g == nil { + return + } + + le := len(g.groups) + if fromIndex < 0 || le <= fromIndex { + return + } + if toIndex < 0 || le <= toIndex { + toIndex = le - 1 + } + if fromIndex == toIndex { + return + } + + f := g.groups[fromIndex] + g.groups = append(g.groups[:fromIndex], g.groups[fromIndex+1:]...) + newSlice := make([]*Group, toIndex+1) + copy(newSlice, g.groups[:toIndex]) + newSlice[toIndex] = f + g.groups = append(newSlice, g.groups[toIndex:]...) +} + +func (g *GroupList) Remove(id ItemID) bool { + if g == nil { + return false + } + + for index, gg := range g.groups { + if gg.ID() == id { + g.RemoveAt(index) + return true + } + } + + return false +} + +func (g *GroupList) RemoveAt(index int) { + if g == nil { + return + } + + le := len(g.groups) + if index < 0 || le <= index { + return + } + var groups []*Group + if index == le { + groups = []*Group{} + } else { + groups = g.groups[index+1:] + } + g.groups = append(g.groups[:index], groups...) 
+} + +func (g *GroupList) Empty() { + if g == nil { + return + } + + g.groups = []*Group{} +} + +func (g *GroupList) GetOrCreateField(ps *Schema, ptr *Pointer) (*Field, bool) { + if g == nil || ptr == nil || ps == nil { + return nil, false + } + psg := ps.Groups().Group(g.SchemaGroup()) + if psg == nil { + return nil, false + } + + item, fid, ok := ptr.FieldByItem() + if !ok { + return nil, false + } + + i := g.Group(item) + if i == nil { + return nil, false + } + + return i.GetOrCreateField(ps, fid) +} + +func (g *GroupList) CreateAndAddListItem(ps *Schema, index *int) *Group { + if g == nil || ps == nil { + return nil + } + psg := ps.Groups().Group(g.SchemaGroup()) + if psg == nil { + return nil + } + + index2 := -1 + if index != nil { + index2 = *index + } + + if ni := InitGroupFrom(psg); ni != nil { + g.Add(ni, index2) + return ni + } + + return nil +} + +func (g *GroupList) MigrateDataset(q DatasetMigrationParam) { + if g == nil { + return + } + for _, f := range g.groups { + f.MigrateDataset(q) + } +} + +func (p *GroupList) ValidateSchema(ps *SchemaGroup) error { + if p == nil { + return nil + } + if ps == nil { + return errors.New("invalid schema") + } + if p.SchemaGroup() != ps.ID() { + return errors.New("invalid schema group id") + } + + for _, i := range p.groups { + if err := i.ValidateSchema(ps); err != nil { + return fmt.Errorf("%s: %w", i.ID(), err) + } + } + + return nil +} diff --git a/server/pkg/property/group_list_builder.go b/server/pkg/property/group_list_builder.go new file mode 100644 index 000000000..cd3b8bdd7 --- /dev/null +++ b/server/pkg/property/group_list_builder.go @@ -0,0 +1,83 @@ +package property + +import "errors" + +var ErrInvalidGroupInGroupList = errors.New("cannot contain an invalid property group in the property group list") + +type GroupListBuilder struct { + p *GroupList +} + +func NewGroupList() *GroupListBuilder { + return &GroupListBuilder{ + p: &GroupList{}, + } +} + +func InitGroupListFrom(g *SchemaGroup) *GroupList { + 
if g == nil || !g.IsList() { + return nil + } + g2, _ := NewGroupList().NewID().SchemaGroup(g.ID()).Build() + return g2 +} + +func (b *GroupListBuilder) Build() (*GroupList, error) { + if b.p.itemBase.ID.IsNil() { + return nil, ErrInvalidID + } + if b.p.itemBase.SchemaGroup == "" { + return nil, ErrInvalidID + } + for _, g := range b.p.groups { + if g.SchemaGroup() != b.p.SchemaGroup() { + return nil, ErrInvalidGroupInGroupList + } + } + return b.p, nil +} + +func (b *GroupListBuilder) MustBuild() *GroupList { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +func (b *GroupListBuilder) base(base itemBase) *GroupListBuilder { + b.p.itemBase = base + return b +} + +func (b *GroupListBuilder) ID(id ItemID) *GroupListBuilder { + b.p.itemBase.ID = id + return b +} + +func (b *GroupListBuilder) NewID() *GroupListBuilder { + b.p.itemBase.ID = NewItemID() + return b +} + +func (b *GroupListBuilder) SchemaGroup(g SchemaGroupID) *GroupListBuilder { + b.p.itemBase.SchemaGroup = g + return b +} + +func (b *GroupListBuilder) Groups(groups []*Group) *GroupListBuilder { + newGroups := []*Group{} + ids := map[ItemID]struct{}{} + for _, g := range groups { + if g == nil { + continue + } + if _, ok := ids[g.ID()]; ok { + continue + } + ids[g.ID()] = struct{}{} + newGroups = append(newGroups, g) + } + b.p.groups = newGroups + return b +} diff --git a/server/pkg/property/group_list_builder_test.go b/server/pkg/property/group_list_builder_test.go new file mode 100644 index 000000000..a9046cfa9 --- /dev/null +++ b/server/pkg/property/group_list_builder_test.go @@ -0,0 +1,175 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGroupListBuilder_Build(t *testing.T) { + pid := NewItemID() + groups := []*Group{NewGroup().ID(pid).SchemaGroup("x").MustBuild()} + + type args struct { + ID ItemID + SchemaGroup SchemaGroupID + Groups []*Group + } + + tests := []struct { + Name string + Args args + Expected *GroupList + Err 
error + }{ + { + Name: "success", + Args: args{ + ID: pid, + SchemaGroup: "x", + Groups: groups, + }, + Expected: &GroupList{ + itemBase: itemBase{ + ID: pid, + SchemaGroup: "x", + }, + groups: groups, + }, + }, + { + Name: "fail invalid group", + Args: args{ + ID: pid, + SchemaGroup: "aa", + Groups: groups, + }, + Err: ErrInvalidGroupInGroupList, + }, + { + Name: "fail invalid id", + Err: ErrInvalidID, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := NewGroupList(). + ID(tt.Args.ID). + SchemaGroup(tt.Args.SchemaGroup). + Groups(tt.Args.Groups). + Build() + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) + } else { + assert.Equal(t, tt.Err, err) + } + }) + } +} + +func TestGroupListBuilder_NewID(t *testing.T) { + b := NewGroupList().NewID().SchemaGroup("x").MustBuild() + assert.NotNil(t, b.ID()) +} + +func TestGroupListBuilder_MustBuild(t *testing.T) { + pid := NewItemID() + groups := []*Group{NewGroup().ID(pid).SchemaGroup("x").MustBuild()} + + type args struct { + ID ItemID + SchemaGroup SchemaGroupID + Groups []*Group + } + + tests := []struct { + Name string + Args args + Err error + Expected *GroupList + }{ + { + Name: "success", + Args: args{ + ID: pid, + SchemaGroup: "x", + Groups: groups, + }, + Expected: &GroupList{ + itemBase: itemBase{ + ID: pid, + SchemaGroup: "x", + }, + groups: groups, + }, + }, + { + Name: "fail invalid group", + Args: args{ + ID: pid, + SchemaGroup: "aa", + Groups: groups, + }, + Err: ErrInvalidGroupInGroupList, + }, + { + Name: "fail invalid id", + Err: ErrInvalidID, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + build := func() *GroupList { + t.Helper() + return NewGroupList(). + ID(tc.Args.ID). + SchemaGroup(tc.Args.SchemaGroup). + Groups(tc.Args.Groups). 
+ MustBuild() + } + + if tc.Err != nil { + assert.PanicsWithValue(t, tc.Err, func() { _ = build() }) + } else { + assert.Equal(t, tc.Expected, build()) + } + }) + } +} + +func TestInitGroupListFrom(t *testing.T) { + tests := []struct { + Name string + SchemaGroup *SchemaGroup + Expected SchemaGroupID + }{ + { + Name: "nil schema group", + }, + { + Name: "success", + SchemaGroup: NewSchemaGroup().ID("aa").MustBuild(), + Expected: "aa", + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := InitGroupFrom(tc.SchemaGroup) + if tc.Expected != "" { + assert.Equal(t, tc.Expected, res.SchemaGroup()) + } else { + assert.Nil(t, res) + } + }) + } +} diff --git a/server/pkg/property/group_list_test.go b/server/pkg/property/group_list_test.go new file mode 100644 index 000000000..08886d060 --- /dev/null +++ b/server/pkg/property/group_list_test.go @@ -0,0 +1,1070 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +var ( + testGroupList1 = NewGroupList().NewID().SchemaGroup(testSchemaGroup2.ID()).Groups([]*Group{testGroup2}).MustBuild() +) + +func TestGroupList_IDRef(t *testing.T) { + id := NewItemID() + assert.Nil(t, (*GroupList)(nil).IDRef()) + assert.Equal(t, &id, (&GroupList{ + itemBase: itemBase{ID: id}, + }).IDRef()) +} + +func TestGroupList_SchemaRef(t *testing.T) { + tests := []struct { + Name string + GL *GroupList + ExpectedSG *SchemaGroupID + ExpectedSchema *SchemaID + }{ + { + Name: "nil group list", + }, + { + Name: "success", + GL: NewGroupList().NewID().SchemaGroup(SchemaGroupID("xx")).MustBuild(), + ExpectedSG: SchemaGroupID("xx").Ref(), + ExpectedSchema: MustSchemaID("xx~1.0.0/aa").Ref(), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.ExpectedSG, tc.GL.SchemaGroupRef()) + }) + } +} + +func TestGroupList_HasLinkedField(t *testing.T) { + pid := NewItemID() + sf := 
NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + dsid := NewDatasetID() + dssid := NewDatasetSchemaID() + f := FieldFrom(sf). + Value(OptionalValueFrom(v)). + Links(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}). + MustBuild() + groups := []*Group{NewGroup().ID(pid).SchemaGroup("xx").Fields([]*Field{f}).MustBuild()} + groups2 := []*Group{NewGroup().ID(pid).SchemaGroup("xx").MustBuild()} + + tests := []struct { + Name string + GL *GroupList + Expected bool + }{ + { + Name: "nil group list", + }, + { + Name: "has linked field", + GL: NewGroupList().NewID().SchemaGroup("xx").Groups(groups).MustBuild(), + Expected: true, + }, + { + Name: "no linked field", + GL: NewGroupList().NewID().SchemaGroup("xx").Groups(groups2).MustBuild(), + Expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.GL.HasLinkedField()) + assert.Equal(t, tc.Expected, tc.GL.IsDatasetLinked(dssid, dsid)) + }) + } +} + +func TestGroupList_Datasets(t *testing.T) { + pid := NewItemID() + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + dsid := NewDatasetID() + dssid := NewDatasetSchemaID() + f := FieldFrom(sf). + Value(OptionalValueFrom(v)). + Links(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}). 
+ MustBuild() + groups := []*Group{NewGroup().ID(pid).SchemaGroup("xx").Fields([]*Field{f}).MustBuild()} + groups2 := []*Group{NewGroup().ID(pid).SchemaGroup("xx").MustBuild()} + + tests := []struct { + Name string + GL *GroupList + Expected []DatasetID + }{ + { + Name: "nil group list", + }, + { + Name: "one dataset", + GL: NewGroupList().NewID().SchemaGroup("xx").Groups(groups).MustBuild(), + Expected: []DatasetID{dsid}, + }, + { + Name: "empty list", + GL: NewGroupList().NewID().SchemaGroup("xx").Groups(groups2).MustBuild(), + Expected: []DatasetID{}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.GL.Datasets()) + }) + } +} + +func TestGroupList_FieldsByLinkedDataset(t *testing.T) { + pid := NewItemID() + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + dsid := NewDatasetID() + dssid := NewDatasetSchemaID() + f := FieldFrom(sf). + Value(OptionalValueFrom(v)). + Links(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}). 
+ MustBuild() + groups := []*Group{NewGroup().ID(pid).SchemaGroup("xx").Fields([]*Field{f}).MustBuild()} + groups2 := []*Group{NewGroup().ID(pid).SchemaGroup("xx").MustBuild()} + + tests := []struct { + Name string + GL *GroupList + Expected []*Field + }{ + { + Name: "nil group list", + }, + { + Name: "one field list", + GL: NewGroupList().NewID().SchemaGroup("xx").Groups(groups).MustBuild(), + Expected: []*Field{f}, + }, + { + Name: "empty list", + GL: NewGroupList().NewID().SchemaGroup("xx").Groups(groups2).MustBuild(), + Expected: []*Field{}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.GL.FieldsByLinkedDataset(dssid, dsid)) + }) + } +} + +func TestGroupList_IsEmpty(t *testing.T) { + pid := NewItemID() + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + dsid := NewDatasetID() + dssid := NewDatasetSchemaID() + f := FieldFrom(sf). + Value(OptionalValueFrom(v)). + Links(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}). 
+ MustBuild() + groups := []*Group{NewGroup().ID(pid).SchemaGroup("xx").Fields([]*Field{f}).MustBuild()} + + tests := []struct { + Name string + GL *GroupList + Expected bool + }{ + { + Name: "nil group list", + }, + { + Name: "is empty", + GL: NewGroupList().NewID().SchemaGroup("xx").MustBuild(), + Expected: true, + }, + { + Name: "is not empty", + GL: NewGroupList().NewID().SchemaGroup("xx").Groups(groups).MustBuild(), + Expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.GL.IsEmpty()) + }) + } +} + +func TestGroupList_Prune(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() + f2 := FieldFrom(sf).MustBuild() + pid := NewItemID() + groups := []*Group{NewGroup().ID(pid).SchemaGroup("xx").Fields([]*Field{f, f2}).MustBuild()} + pruned := []*Group{NewGroup().ID(pid).SchemaGroup("xx").Fields([]*Field{f}).MustBuild()} + + tests := []struct { + Name string + GL *GroupList + Expected []*Group + }{ + { + Name: "nil group list", + }, + { + Name: "pruned list", + GL: NewGroupList().NewID().SchemaGroup("xx").Groups(groups).MustBuild(), + Expected: pruned, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + tc.GL.Prune() + assert.Equal(t, tc.Expected, tc.GL.Groups()) + }) + } +} + +func TestGroupList_Group(t *testing.T) { + pid := NewItemID() + g := NewGroup().ID(pid).SchemaGroup("xx").MustBuild() + + tests := []struct { + Name string + Input ItemID + GL *GroupList + Expected *Group + }{ + { + Name: "nil group list", + }, + { + Name: "found", + Input: pid, + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g}).MustBuild(), + Expected: g, + }, + { + Name: "not found", + Input: NewItemID(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g}).MustBuild(), + Expected: 
nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.GL.Group(tc.Input)) + }) + } +} + +func TestGroupList_GroupAt(t *testing.T) { + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + + tests := []struct { + Name string + Index int + GL *GroupList + Expected *Group + }{ + { + Name: "nil group list", + }, + { + Name: "index < 0", + Index: -1, + }, + { + Name: "index > len(g)-1", + Index: 4, + }, + { + Name: "found", + Index: 2, + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: g3, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.GL.GroupAt(tc.Index)) + }) + } +} + +func TestGroupList_Has(t *testing.T) { + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + + tests := []struct { + Name string + Input ItemID + GL *GroupList + Expected bool + }{ + { + Name: "nil group list", + }, + { + Name: "found", + Input: g2.ID(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: true, + }, + { + Name: "not found", + Input: g3.ID(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g4}).MustBuild(), + Expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.GL.Has(tc.Input)) + }) + } +} + +func TestGroupList_Count(t *testing.T) { + g1 := 
NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + + tests := []struct { + Name string + GL *GroupList + Expected int + }{ + { + Name: "nil group list", + }, + { + Name: "not found", + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: 4, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.GL.Count()) + }) + } +} + +func TestGroupList_Add(t *testing.T) { + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + + tests := []struct { + Name string + GL *GroupList + Gr *Group + Index int + Expected struct { + Gr *Group + Index int + } + }{ + { + Name: "nil group list", + }, + { + Name: "index < 0", + Index: -1, + Gr: g2, + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + Expected: struct { + Gr *Group + Index int + }{ + Gr: g2, + Index: 3, + }, + }, + { + Name: "len(g) > index > 0 ", + Index: 2, + Gr: g2, + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + Expected: struct { + Gr *Group + Index int + }{ + Gr: g2, + Index: 2, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + tt.GL.Add(tt.Gr, tt.Index) + assert.Equal(t, tt.Expected.Gr, tt.GL.GroupAt(tt.Expected.Index)) + }) + } +} + +func TestGroupList_AddOrMove(t *testing.T) { + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := 
NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + + tests := []struct { + Name string + GL *GroupList + Gr *Group + Index int + Expected struct { + Gr *Group + Index int + } + }{ + { + Name: "nil group list", + }, + { + Name: "index < 0", + Index: -1, + Gr: g2, + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + Expected: struct { + Gr *Group + Index int + }{ + Gr: g2, + Index: 3, + }, + }, + { + Name: "len(g) > index > 0 ", + Index: 2, + Gr: g2, + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + Expected: struct { + Gr *Group + Index int + }{ + Gr: g2, + Index: 2, + }, + }, + { + Name: "move group", + Index: 2, + Gr: g1, + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + Expected: struct { + Gr *Group + Index int + }{ + Gr: g1, + Index: 2, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + tt.GL.AddOrMove(tt.Gr, tt.Index) + assert.Equal(t, tt.Expected.Gr, tt.GL.GroupAt(tt.Expected.Index)) + }) + } +} + +func TestGroupList_Move(t *testing.T) { + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + + tests := []struct { + Name string + GL *GroupList + ID ItemID + ToIndex int + ExpectedID ItemID + ExpectedIndex int + }{ + { + Name: "nil group list", + }, + { + Name: "success", + ID: g1.ID(), + ToIndex: 2, + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + ExpectedID: g1.ID(), + ExpectedIndex: 2, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + tt.GL.Move(tt.ID, tt.ToIndex) + g := tt.GL.GroupAt(tt.ExpectedIndex) + if 
!tt.ExpectedID.IsNil() { + assert.Equal(t, tt.ExpectedID, g.ID()) + } + }) + } +} + +func TestGroupList_MoveAt(t *testing.T) { + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + + tests := []struct { + Name string + GL *GroupList + FromIndex, ToIndex int + Expected []*Group + }{ + { + Name: "nil group list", + }, + { + Name: "from = to", + FromIndex: 2, + ToIndex: 2, + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: []*Group{g1, g2, g3, g4}, + }, + { + Name: "from < 0", + FromIndex: -1, + ToIndex: 2, + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: []*Group{g1, g2, g3, g4}, + }, + { + Name: "success move", + FromIndex: 0, + ToIndex: 2, + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: []*Group{g2, g3, g1, g4}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + tt.GL.MoveAt(tt.FromIndex, tt.ToIndex) + assert.Equal(t, tt.Expected, tt.GL.Groups()) + }) + } +} + +func TestGroupList_RemoveAt(t *testing.T) { + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + + tests := []struct { + Name string + GL *GroupList + Index int + Expected []*Group + }{ + { + Name: "nil group list", + }, + { + Name: "success", + Index: 1, + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: []*Group{g1, g3, g4}, + }, + { + Name: "index < 0", + Index: -1, + GL: 
NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: []*Group{g1, g2, g3, g4}, + }, + { + Name: "index > length", + Index: 5, + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: []*Group{g1, g2, g3, g4}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + tt.GL.RemoveAt(tt.Index) + assert.Equal(t, tt.Expected, tt.GL.Groups()) + }) + } +} +func TestGroupList_Remove(t *testing.T) { + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + + tests := []struct { + Name string + GL *GroupList + Input ItemID + Expected bool + }{ + { + Name: "nil group list", + }, + { + Name: "success", + Input: g1.ID(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: true, + }, + { + Name: "not found", + Input: g4.ID(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3}).MustBuild(), + Expected: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.GL.Remove(tt.Input) + assert.Equal(t, tt.Expected, res) + }) + } +} + +func TestGroupList_GetOrCreateField(t *testing.T) { + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID("aa").Fields([]*SchemaField{sf}).MustBuild() + g := NewGroup().ID(NewItemID()).SchemaGroup(sg.ID()).MustBuild() + s := NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild() + + tests := []struct { + Name string + Target *GroupList + Schema *Schema + Ptr *Pointer + Expected *Field + ExpectedOK bool + }{ + { + Name: "success", + Target: 
NewGroupList().NewID().SchemaGroup("aa").Groups([]*Group{g}).MustBuild(), + Schema: s, + Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), + Expected: FieldFrom(sf).MustBuild(), + ExpectedOK: true, + }, + { + Name: "can't get a group", + Target: NewGroupList().NewID().SchemaGroup("aa").MustBuild(), + Schema: s, + Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), + }, + { + Name: "FieldByItem not ok: sg != nil", + Target: NewGroupList().NewID().SchemaGroup("aa").Groups([]*Group{g}).MustBuild(), + Schema: s, + Ptr: NewPointer(sg.IDRef(), g.IDRef(), sf.ID().Ref()), + }, + { + Name: "psg == nil", + Target: nil, + Schema: s, + Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, ok := tt.Target.GetOrCreateField(tt.Schema, tt.Ptr) + assert.Equal(t, tt.Expected, res) + assert.Equal(t, tt.ExpectedOK, ok) + }) + } +} + +func TestGroupList_CreateAndAddListItem(t *testing.T) { + getIntRef := func(i int) *int { return &i } + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID("aa").Fields([]*SchemaField{sf}).MustBuild() + g := NewGroup().ID(NewItemID()).SchemaGroup(sg.ID()).MustBuild() + + tests := []struct { + Name string + GL *GroupList + Schema *Schema + Index *int + Expected *Group + }{ + { + Name: "success", + Index: getIntRef(0), + GL: NewGroupList().NewID().SchemaGroup("aa").MustBuild(), + Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), + Expected: g, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.GL.CreateAndAddListItem(tt.Schema, tt.Index) + assert.Equal(t, tt.Expected.Fields(nil), res.Fields(nil)) + assert.Equal(t, tt.Expected.SchemaGroup(), res.SchemaGroup()) + }) + } +} + +func TestGroupList_Clone(t *testing.T) { + tests := []struct { + name string + target *GroupList + n bool + }{ + { + name: 
"ok", + target: testGroupList1.Clone(), + }, + { + name: "nil", + n: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + res := tt.target.Clone() + if tt.n { + assert.Nil(t, res) + } else { + assert.Equal(t, tt.target, res) + assert.NotSame(t, tt.target, res) + } + }) + } +} + +func TestGroupList_CloneItem(t *testing.T) { + tests := []struct { + name string + target *GroupList + n bool + }{ + { + name: "ok", + target: testGroupList1.Clone(), + }, + { + name: "nil", + n: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + res := tt.target.CloneItem() + if tt.n { + assert.Nil(t, res) + } else { + assert.Equal(t, tt.target, res) + assert.NotSame(t, tt.target, res) + } + }) + } +} + +func TestGroupList_Fields(t *testing.T) { + type args struct { + p *Pointer + } + tests := []struct { + name string + target *GroupList + args args + want []*Field + }{ + { + name: "all", + target: testGroupList1, + args: args{p: nil}, + want: []*Field{testField2}, + }, + { + name: "specified", + target: testGroupList1, + args: args{p: PointFieldOnly(testField2.Field())}, + want: []*Field{testField2}, + }, + { + name: "not found", + target: testGroupList1, + args: args{p: PointFieldOnly("xxxxxx")}, + want: nil, + }, + { + name: "empty", + target: &GroupList{}, + args: args{p: PointFieldOnly(testField2.Field())}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{p: PointFieldOnly(testField2.Field())}, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.Fields(tt.args.p)) + }) + } +} + +func TestGroupList_RemoveFields(t *testing.T) { + type args struct { + p *Pointer + } + tests := []struct { + name string + target *GroupList + args args + want bool + wantFields []*Field + }{ + { + name: "nil pointer", + target: testGroupList1.Clone(), + args: args{p: nil}, + want: false, + wantFields: []*Field{testField2}, + }, + { + name: 
"specified", + target: testGroupList1.Clone(), + args: args{p: PointFieldOnly(testField2.Field())}, + want: true, + wantFields: nil, + }, + { + name: "specified schema group", + target: testGroupList1.Clone(), + args: args{p: PointItemBySchema(testGroupList1.SchemaGroup())}, + want: false, + wantFields: []*Field{testField2}, + }, + { + name: "specified item", + target: testGroupList1.Clone(), + args: args{p: PointItem(testGroupList1.ID())}, + want: false, + wantFields: []*Field{testField2}, + }, + { + name: "not found", + target: testGroupList1.Clone(), + args: args{p: PointFieldOnly("xxxxxx")}, + want: false, + wantFields: []*Field{testField2}, + }, + { + name: "empty", + target: &GroupList{}, + args: args{p: PointFieldOnly(testField1.Field())}, + want: false, + wantFields: nil, + }, + { + name: "nil", + target: nil, + args: args{p: PointFieldOnly(testField1.Field())}, + want: false, + wantFields: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.RemoveFields(tt.args.p)) + if tt.target != nil { + assert.Equal(t, tt.wantFields, tt.target.Fields(nil)) + } + }) + } +} + +func TestGroupList_GroupAndFields(t *testing.T) { + tests := []struct { + name string + target *GroupList + args *Pointer + want []GroupAndField + }{ + { + name: "all", + target: testGroupList1, + args: nil, + want: []GroupAndField{ + {ParentGroup: testGroupList1, Group: testGroup2, Field: testField2}, + }, + }, + { + name: "specified", + target: testGroupList1, + args: PointFieldByItem(testGroup2.ID(), testField2.Field()), + want: []GroupAndField{ + {ParentGroup: testGroupList1, Group: testGroup2, Field: testField2}, + }, + }, + { + name: "specified but not found", + target: testGroupList1, + args: PointFieldByItem(testGroup1.ID(), testField2.Field()), + want: []GroupAndField{}, + }, + { + name: "empty", + target: &GroupList{}, + args: nil, + want: nil, + }, + { + name: "nil", + target: nil, + args: nil, + want: nil, + }, + } + + 
for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + res := tt.target.GroupAndFields(tt.args) + assert.Equal(t, tt.want, res) + for i, r := range res { + assert.Same(t, tt.want[i].Field, r.Field) + assert.Same(t, tt.want[i].Group, r.Group) + assert.Same(t, tt.want[i].ParentGroup, r.ParentGroup) + } + }) + } +} + +func TestGroupList_GuessSchema(t *testing.T) { + tests := []struct { + name string + target *GroupList + want *SchemaGroup + }{ + { + name: "ok", + target: &GroupList{ + itemBase: itemBase{ + SchemaGroup: "aa", + }, + groups: []*Group{ + { + itemBase: itemBase{ + SchemaGroup: "aa", + }, + fields: []*Field{ + {field: "a", v: NewOptionalValue(ValueTypeLatLng, nil)}, + }, + }, + { + itemBase: itemBase{ + SchemaGroup: "aa", + }, + fields: []*Field{ + {field: "b", v: NewOptionalValue(ValueTypeString, nil)}, + }, + }, + }, + }, + want: &SchemaGroup{ + id: "aa", + list: true, + fields: []*SchemaField{ + {id: "a", propertyType: ValueTypeLatLng}, + {id: "b", propertyType: ValueTypeString}, + }, + }, + }, + { + name: "empty", + target: &GroupList{}, + want: nil, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.GuessSchema()) + }) + } +} diff --git a/server/pkg/property/group_test.go b/server/pkg/property/group_test.go new file mode 100644 index 000000000..51cd993c4 --- /dev/null +++ b/server/pkg/property/group_test.go @@ -0,0 +1,485 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/value" + "github.com/stretchr/testify/assert" +) + +var ( + testGroup1 = NewGroup().NewID().SchemaGroup(testSchemaGroup1.ID()).Fields([]*Field{testField1}).MustBuild() + testGroup2 = NewGroup().NewID().SchemaGroup(testSchemaGroup2.ID()).Fields([]*Field{testField2}).MustBuild() +) + +func TestGroup_IDRef(t *testing.T) { + id := NewItemID() + assert.Nil(t, (*Group)(nil).IDRef()) + assert.Equal(t, &id, (&Group{ + itemBase: 
itemBase{ + ID: id, + }, + }).IDRef()) +} + +func TestGroup_SchemaGroup(t *testing.T) { + var g *Group + assert.Nil(t, g.SchemaGroupRef()) + + pfid := SchemaGroupID("aa") + g = NewGroup().NewID().SchemaGroup(pfid).MustBuild() + assert.Equal(t, pfid, g.SchemaGroup()) + assert.Equal(t, pfid.Ref(), g.SchemaGroupRef()) +} + +func TestGroup_HasLinkedField(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) + ls := NewLinks([]*Link{l}) + f := FieldFrom(sf).Value(OptionalValueFrom(v)).Links(ls).MustBuild() + f2 := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() + + tests := []struct { + Name string + Group *Group + Expected bool + }{ + { + Name: "nil group", + Group: nil, + Expected: false, + }, + { + Name: "true", + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f}).MustBuild(), + Expected: true, + }, + { + Name: "false", + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f2}).MustBuild(), + Expected: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Group.HasLinkedField() + assert.Equal(t, tt.Expected, res) + }) + } +} +func TestGroup_IsDatasetLinked(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + dsid := NewDatasetID() + dssid := NewDatasetSchemaID() + l := NewLink(dsid, dssid, NewDatasetFieldID()) + ls := NewLinks([]*Link{l}) + f := FieldFrom(sf).Value(OptionalValueFrom(v)).Links(ls).MustBuild() + f2 := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() + + tests := []struct { + Name string + Group *Group + DatasetSchema DatasetSchemaID + Dataset DatasetID + Expected bool + }{ + { + Name: "nil group", + }, + { + Name: "true", + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f}).MustBuild(), + Dataset: dsid, + DatasetSchema: dssid, + 
Expected: true, + }, + { + Name: "false", + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f2}).MustBuild(), + Expected: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Group.IsDatasetLinked(tt.DatasetSchema, tt.Dataset) + assert.Equal(t, tt.Expected, res) + }) + } +} + +func TestGroup_Datasets(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + dsid := NewDatasetID() + l := NewLink(dsid, NewDatasetSchemaID(), NewDatasetFieldID()) + ls := NewLinks([]*Link{l}) + f := FieldFrom(sf).Value(OptionalValueFrom(v)).Links(ls).MustBuild() + + tests := []struct { + Name string + Group *Group + Expected []DatasetID + }{ + { + Name: "nil group", + Group: nil, + Expected: nil, + }, + { + Name: "normal case", + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f}).MustBuild(), + Expected: []DatasetID{dsid}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Group.Datasets() + assert.Equal(t, tt.Expected, res) + }) + } +} + +func TestGroup_FieldsByLinkedDataset(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + dsid := NewDatasetID() + dssid := NewDatasetSchemaID() + l := NewLink(dsid, dssid, NewDatasetFieldID()) + ls := NewLinks([]*Link{l}) + f := FieldFrom(sf).Value(OptionalValueFrom(v)).Links(ls).MustBuild() + + tests := []struct { + Name string + Group *Group + DatasetSchema DatasetSchemaID + DataSet DatasetID + Expected []*Field + }{ + { + Name: "nil group", + }, + { + Name: "normal case", + DataSet: dsid, + DatasetSchema: dssid, + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f}).MustBuild(), + Expected: []*Field{f}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := 
tt.Group.FieldsByLinkedDataset(tt.DatasetSchema, tt.DataSet) + assert.Equal(t, tt.Expected, res) + }) + } +} + +func TestGroup_IsEmpty(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() + f2 := FieldFrom(sf).MustBuild() + + tests := []struct { + Name string + Group *Group + Expected bool + }{ + + { + Name: "true case", + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f2}).MustBuild(), + Expected: true, + }, + { + Name: "false case", + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f}).MustBuild(), + Expected: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Group.IsEmpty() + assert.Equal(t, tt.Expected, res) + }) + } +} + +func TestGroup_Prune(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() + f2 := FieldFrom(sf).MustBuild() + + tests := []struct { + Name string + Group *Group + Expected []*Field + }{ + + { + Name: "nil group", + }, + { + Name: "normal case", + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f, f2}).MustBuild(), + Expected: []*Field{f}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + tt.Group.Prune() + assert.Equal(t, tt.Expected, tt.Group.Fields(nil)) + }) + } +} + +func TestGroup_GetOrCreateField(t *testing.T) { + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + f := FieldFrom(sf).MustBuild() + sg := NewSchemaGroup().ID("aa").Fields([]*SchemaField{sf}).MustBuild() + + tests := []struct { + Name string + Group *Group + PS *Schema + FID FieldID + Expected struct { + Field *Field + Bool bool + } + }{ + { + Name: "nil group", + }, + { + Name: "nil ps", + Group: 
NewGroup().NewID().SchemaGroup("aa").MustBuild(), + }, + { + Name: "group schema doesn't equal to ps", + Group: NewGroup().NewID().SchemaGroup("aa").MustBuild(), + PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), + }, + { + Name: "create field", + Group: NewGroup().NewID().SchemaGroup("aa").MustBuild(), + PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), + FID: "aa", + Expected: struct { + Field *Field + Bool bool + }{ + Field: FieldFrom(sf).MustBuild(), + Bool: true, + }, + }, + { + Name: "get field", + Group: NewGroup().NewID().SchemaGroup("aa").Fields([]*Field{f}).MustBuild(), + PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), + FID: "aa", + Expected: struct { + Field *Field + Bool bool + }{ + Field: FieldFrom(sf).MustBuild(), + Bool: false, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, b := tt.Group.GetOrCreateField(tt.PS, tt.FID) + assert.Equal(t, tt.Expected.Field, res) + assert.Equal(t, tt.Expected.Bool, b) + }) + } +} + +func TestGroup_RemoveField(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + sf2 := NewSchemaField().ID("b").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() + f2 := FieldFrom(sf2).MustBuild() + + tests := []struct { + Name string + Group *Group + Input FieldID + Expected []*Field + }{ + { + Name: "nil group", + }, + { + Name: "normal case", + Input: "b", + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f, f2}).MustBuild(), + Expected: []*Field{f}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + tt.Group.RemoveField(tt.Input) + assert.Equal(t, tt.Expected, tt.Group.Fields(nil)) + }) + } +} + +func 
TestGroup_FieldIDs(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + sf2 := NewSchemaField().ID("b").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() + f2 := FieldFrom(sf2).MustBuild() + + tests := []struct { + Name string + Group *Group + Expected []FieldID + }{ + { + Name: "nil group", + }, + { + Name: "normal case", + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f, f2}).MustBuild(), + Expected: []FieldID{"a", "b"}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Group.FieldIDs() + assert.Equal(t, tt.Expected, res) + }) + } +} + +func TestGroup_Field(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + sf2 := NewSchemaField().ID("b").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() + f2 := FieldFrom(sf2).MustBuild() + + tests := []struct { + Name string + Group *Group + Input FieldID + Expected *Field + }{ + { + Name: "nil group", + }, + { + Name: "normal case", + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f, f2}).MustBuild(), + Input: "a", + Expected: f, + }, + { + Name: "normal case", + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f, f2}).MustBuild(), + Input: "x", + Expected: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Group.Field(tt.Input) + assert.Equal(t, tt.Expected, res) + }) + } +} + +func TestGroup_RepresentativeFieldValue(t *testing.T) { + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID("aa"). + Fields([]*SchemaField{sf}). + RepresentativeField(FieldID("aa").Ref()). + MustBuild() + sg2 := NewSchemaGroup(). + ID("bb"). + Fields([]*SchemaField{sf}). 
+ MustBuild() + + type args struct { + Schema *Schema + Value *Value + } + + tests := []struct { + Name string + Args args + Group *Group + FieldID FieldID + Expected *Field + }{ + { + Name: "nil group", + }, + { + Name: "nil ps", + Group: NewGroup().NewID().SchemaGroup("aa").MustBuild(), + }, + { + Name: "invalid property field", + Group: NewGroup().NewID().SchemaGroup("aa").MustBuild(), + Args: args{ + Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/bb")).Groups(NewSchemaGroupList([]*SchemaGroup{sg2})).MustBuild(), + Value: ValueTypeString.ValueFrom("abc"), + }, + }, + { + Name: "ok", + Group: NewGroup().NewID().SchemaGroup("aa").MustBuild(), + Args: args{ + Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), + Value: ValueTypeString.ValueFrom("abc"), + }, + Expected: &Field{field: "aa", v: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.Expected, tt.Group.RepresentativeField(tt.Args.Schema)) + }) + } +} diff --git a/server/pkg/property/id.go b/server/pkg/property/id.go new file mode 100644 index 000000000..25fdd7911 --- /dev/null +++ b/server/pkg/property/id.go @@ -0,0 +1,56 @@ +package property + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +type ID = id.PropertyID +type ItemID = id.PropertyItemID +type FieldID = id.PropertyFieldID +type SchemaID = id.PropertySchemaID +type SchemaGroupID = id.PropertySchemaGroupID +type DatasetID = id.DatasetID +type DatasetFieldID = id.DatasetFieldID +type DatasetSchemaID = id.DatasetSchemaID +type SceneID = id.SceneID + +var NewID = id.NewPropertyID +var NewItemID = id.NewPropertyItemID +var NewSchemaID = id.NewPropertySchemaID +var NewDatasetID = id.NewDatasetID +var NewDatasetFieldID = id.NewDatasetFieldID +var NewDatasetSchemaID = id.NewDatasetSchemaID +var NewSceneID = id.NewSceneID + +var MustID = 
id.MustPropertyID +var MustItemID = id.MustPropertyItemID +var MustSchemaID = id.MustPropertySchemaID +var MustDatasetID = id.MustDatasetID +var MustDatasetFieldID = id.MustDatasetFieldID +var MustDatasetSchemaID = id.MustDatasetSchemaID +var MustSceneID = id.MustSceneID + +var IDFrom = id.PropertyIDFrom +var ItemIDFrom = id.PropertyItemIDFrom +var SchemaIDFrom = id.PropertySchemaIDFrom +var DatasetIDFrom = id.DatasetIDFrom +var DatasetFieldIDFrom = id.DatasetFieldIDFrom +var DatasetSchemaIDFrom = id.DatasetSchemaIDFrom +var SceneIDFrom = id.SceneIDFrom + +var IDFromRef = id.PropertyIDFromRef +var ItemIDFromRef = id.PropertyItemIDFromRef +var SchemaIDFromRef = id.PropertySchemaIDFromRef +var DatasetIDFromRef = id.DatasetIDFromRef +var DatasetFieldIDFromRef = id.DatasetFieldIDFromRef +var DatasetSchemaIDFromRef = id.DatasetSchemaIDFromRef +var SceneIDFromRef = id.SceneIDFromRef + +type IDSet = id.PropertyIDSet +type IDList = id.PropertyIDList +type ItemIDSet = id.PropertyItemIDSet + +var NewIDSet = id.NewPropertyIDSet +var NewItemIDSet = id.NewPropertyItemIDSet + +var ErrInvalidID = id.ErrInvalidID diff --git a/server/pkg/property/id_test.go b/server/pkg/property/id_test.go new file mode 100644 index 000000000..137bb5954 --- /dev/null +++ b/server/pkg/property/id_test.go @@ -0,0 +1,9 @@ +package property + +func mockNewItemID(id ItemID) func() { + original := NewItemID + NewItemID = func() ItemID { return id } + return func() { + NewItemID = original + } +} diff --git a/server/pkg/property/initializer.go b/server/pkg/property/initializer.go new file mode 100644 index 000000000..b66882648 --- /dev/null +++ b/server/pkg/property/initializer.go @@ -0,0 +1,311 @@ +//go:generate go run github.com/globusdigital/deep-copy --type Initializer --pointer-receiver -o initializer_gen.go . 
+ +package property + +import ( + "errors" +) + +var ErrSchemaDoesNotMatch = errors.New("schema of the initializer does not match schema of the argument") + +type Initializer struct { + ID *ID `json:"id"` + Schema SchemaID `json:"schema"` + Items []*InitializerItem `json:"items"` +} + +func (p *Initializer) Clone() *Initializer { + if p == nil { + return nil + } + + var items []*InitializerItem + if p.Items != nil { + items = make([]*InitializerItem, 0, len(p.Items)) + for _, i := range p.Items { + items = append(items, i.Clone()) + } + } + + return &Initializer{ + ID: p.ID.CopyRef(), + Schema: p.Schema, + Items: items, + } +} + +func (p *Initializer) Property(scene SceneID) (*Property, error) { + if p == nil { + return nil, nil + } + + i := p.ID + if i == nil { + i = NewID().Ref() + } + + var items []Item + if p.Items != nil { + items = make([]Item, 0, len(p.Items)) + for _, i := range p.Items { + item, err := i.PropertyItem() + if err != nil { + return nil, err + } + items = append(items, item) + } + } + + return New().ID(*i).Schema(p.Schema).Scene(scene).Items(items).Build() +} + +// PropertyIncludingEmpty generates a new property, but even if the initializer is empty, an empty property will be generated. 
+func (p *Initializer) PropertyIncludingEmpty(scene SceneID, schema SchemaID) (*Property, error) { + if p != nil && p.Schema != schema { + return nil, ErrSchemaDoesNotMatch + } + + pr, err := p.Property(scene) + if err != nil { + return nil, err + } + + if pr == nil { + pr, err = New().NewID().Schema(schema).Scene(scene).Build() + if err != nil { + return nil, err + } + } + + return pr, nil +} + +func (p *Initializer) MustBeProperty(scene SceneID) *Property { + r, err := p.Property(scene) + if err != nil { + panic(err) + } + return r +} + +type InitializerItem struct { + ID *ItemID `json:"id"` + SchemaItem SchemaGroupID `json:"schemaItem"` + Groups []*InitializerGroup `json:"groups"` + Fields []*InitializerField `json:"fields"` +} + +func (p *InitializerItem) Clone() *InitializerItem { + if p == nil { + return nil + } + + var groups []*InitializerGroup + if p.Groups != nil { + groups = make([]*InitializerGroup, 0, len(p.Groups)) + for _, g := range p.Groups { + groups = append(groups, g.Clone()) + } + } + + var fields []*InitializerField + if p.Fields != nil { + fields = make([]*InitializerField, 0, len(p.Fields)) + for _, f := range p.Fields { + fields = append(fields, f.Clone()) + } + } + + return &InitializerItem{ + ID: p.ID.CopyRef(), + SchemaItem: p.SchemaItem, + Groups: groups, + Fields: fields, + } +} + +func (p *InitializerItem) PropertyItem() (Item, error) { + if p == nil { + return nil, nil + } + + i := p.ID + if i == nil { + i = NewItemID().Ref() + } + + pi := NewItem().ID(*i).SchemaGroup(p.SchemaItem) + + if p.Groups != nil { + groups := make([]*Group, 0, len(p.Groups)) + for _, g := range p.Groups { + g2, err := g.PropertyGroup(p.SchemaItem) + if err != nil { + return nil, err + } + if g2 != nil { + groups = append(groups, g2) + } + } + + return pi.GroupList().Groups(groups).Build() + } + + var fields []*Field + if p.Fields != nil { + fields = make([]*Field, 0, len(p.Fields)) + for _, f := range p.Fields { + if f2 := f.PropertyField(); f2 != nil { + 
fields = append(fields, f2) + } + } + } + + return pi.Group().Fields(fields).Build() +} + +func (p *InitializerItem) PropertyGroupList() *GroupList { + i, _ := p.PropertyItem() + if g := ToGroupList(i); g != nil { + return g + } + return nil +} + +func (p *InitializerItem) PropertyGroup() *Group { + i, _ := p.PropertyItem() + if g := ToGroup(i); g != nil { + return g + } + return nil +} + +type InitializerGroup struct { + ID *ItemID `json:"id"` + Fields []*InitializerField `json:"fields"` +} + +func (p *InitializerGroup) Clone() *InitializerGroup { + if p == nil { + return nil + } + + var fields []*InitializerField + if p.Fields != nil { + fields = make([]*InitializerField, 0, len(p.Fields)) + for _, f := range p.Fields { + fields = append(fields, f.Clone()) + } + } + + return &InitializerGroup{ + ID: p.ID.CopyRef(), + Fields: fields, + } +} + +func (p *InitializerGroup) PropertyGroup(parentItem SchemaGroupID) (*Group, error) { + if p == nil { + return nil, nil + } + + i := p.ID + if i == nil { + i = NewItemID().Ref() + } + + pi := NewItem().ID(*i).SchemaGroup(parentItem) + + var fields []*Field + if p.Fields != nil { + fields = make([]*Field, 0, len(p.Fields)) + for _, f := range p.Fields { + if f2 := f.PropertyField(); f2 != nil { + fields = append(fields, f2) + } + } + } + + return pi.Group().Fields(fields).Build() +} + +type InitializerField struct { + Field FieldID `json:"field"` + Type ValueType `json:"type"` + Value *Value `json:"value"` + Links []*InitializerLink `json:"links"` +} + +func (p *InitializerField) Clone() *InitializerField { + if p == nil { + return nil + } + + var links []*InitializerLink + if p.Links != nil { + links = make([]*InitializerLink, 0, len(p.Links)) + for _, l := range p.Links { + links = append(links, l.Clone()) + } + } + + return &InitializerField{ + Field: p.Field, + Type: p.Type, + Value: p.Value.Clone(), + Links: links, + } +} + +func (p *InitializerField) PropertyField() *Field { + if p == nil || p.Field == "" || p.Type == "" 
{ + return nil + } + + var plinks *Links + if p.Links != nil { + links := make([]*Link, 0, len(p.Links)) + for _, l := range p.Links { + link := l.PropertyLink() + if link != nil { + links = append(links, link) + } + } + plinks = NewLinks(links) + } + + return NewField(p.Field). + Value(NewOptionalValue(p.Type, p.Value.Clone())). + Links(plinks). + Build() +} + +type InitializerLink struct { + Dataset *DatasetID `json:"dataset"` + Schema DatasetSchemaID `json:"schema"` + Field DatasetFieldID `json:"field"` +} + +func (p *InitializerLink) Clone() *InitializerLink { + if p == nil { + return nil + } + + return &InitializerLink{ + Dataset: p.Dataset.CopyRef(), + Schema: p.Schema, + Field: p.Field, + } +} + +func (p *InitializerLink) PropertyLink() *Link { + if p == nil { + return nil + } + + if p.Dataset == nil { + return NewLinkFieldOnly(p.Schema, p.Field) + } + + return NewLink(*p.Dataset, p.Schema, p.Field) +} diff --git a/server/pkg/property/initializer_test.go b/server/pkg/property/initializer_test.go new file mode 100644 index 000000000..efa2f8ed4 --- /dev/null +++ b/server/pkg/property/initializer_test.go @@ -0,0 +1,289 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestInitializer_Clone(t *testing.T) { + initializer := &Initializer{ + ID: NewID().Ref(), + Schema: MustSchemaID("reearth/marker"), + Items: []*InitializerItem{{ + ID: NewItemID().Ref(), + SchemaItem: SchemaGroupID("hoge"), + }}, + } + + cloned := initializer.Clone() + + assert.NotSame(t, cloned, initializer) + assert.NotSame(t, cloned.Items, initializer.Items) + assert.NotSame(t, cloned.Items[0], initializer.Items[0]) + assert.Equal(t, cloned, initializer) +} + +func TestInitializer_Property(t *testing.T) { + sid := NewSceneID() + initializer := &Initializer{ + ID: NewID().Ref(), + Schema: MustSchemaID("reearth/marker"), + Items: []*InitializerItem{{ + ID: NewItemID().Ref(), + SchemaItem: SchemaGroupID("hoge"), + }}, + } + + expected := 
New().ID(*initializer.ID).Schema(initializer.Schema).Scene(sid).Items([]Item{ + NewItem().ID(*initializer.Items[0].ID).SchemaGroup(initializer.Items[0].SchemaItem).Group().MustBuild(), + }).MustBuild() + + actual, err := initializer.Property(sid) + assert.NoError(t, err) + assert.Equal(t, expected, actual) + + // test if a new id is generated + initializer.ID = nil + actual, err = initializer.Property(sid) + assert.NoError(t, err) + assert.False(t, actual.ID().IsEmpty()) +} + +func TestInitializer_PropertyIncludingEmpty(t *testing.T) { + sid := NewSceneID() + psid := MustSchemaID("reearth/hoge") + psid2 := MustSchemaID("reearth/marker") + + // test case 1: should generate an empty property + var initializer *Initializer + actual, err := initializer.PropertyIncludingEmpty(sid, psid) + expected := New().ID(actual.ID()).Schema(psid).Scene(sid).MustBuild() + assert.NoError(t, err) + assert.Equal(t, expected, actual) + + // test case 2: should returns an error when schema does not match + initializer = &Initializer{ + ID: NewID().Ref(), + Schema: psid2, + Items: []*InitializerItem{{ + ID: NewItemID().Ref(), + SchemaItem: SchemaGroupID("hoge"), + }}, + } + + _, err = initializer.PropertyIncludingEmpty(sid, psid) + assert.Equal(t, ErrSchemaDoesNotMatch, err) + + // test case 3: should generates a property normally + actual, err = initializer.PropertyIncludingEmpty(sid, psid2) + expected = New().ID(actual.ID()).Schema(initializer.Schema).Scene(sid).Items([]Item{ + NewItem().ID(*initializer.Items[0].ID).SchemaGroup(initializer.Items[0].SchemaItem).Group().MustBuild(), + }).MustBuild() + assert.NoError(t, err) + assert.Equal(t, expected, actual) +} + +func TestInitializerItem_Clone(t *testing.T) { + item := &InitializerItem{ + ID: NewItemID().Ref(), + SchemaItem: SchemaGroupID("hoge"), + Groups: []*InitializerGroup{{ + ID: NewItemID().Ref(), + Fields: []*InitializerField{{ + Field: FieldID("name"), + Type: ValueTypeString, + Value: ValueTypeString.ValueFrom("aaa"), + Links: 
[]*InitializerLink{{ + Dataset: NewDatasetID().Ref(), + Schema: NewDatasetSchemaID(), + Field: NewDatasetFieldID(), + }}, + }}, + }}, + } + + cloned := item.Clone() + + assert.NotSame(t, cloned, item) + assert.NotSame(t, cloned.Groups, item.Groups) + assert.NotSame(t, cloned.Groups[0], item.Groups[0]) + assert.NotSame(t, cloned.Groups[0].Fields, item.Groups[0].Fields) + assert.NotSame(t, cloned.Groups[0].Fields[0], item.Groups[0].Fields[0]) + assert.Equal(t, cloned, item) +} + +func TestInitializerItem_PropertyItem(t *testing.T) { + item := &InitializerItem{ + ID: NewItemID().Ref(), + SchemaItem: SchemaGroupID("hoge"), + } + + expected := NewItem().ID(*item.ID).SchemaGroup(item.SchemaItem).Group().MustBuild() + + created, err := item.PropertyItem() + assert.NoError(t, err) + assert.Equal(t, expected, created) + + item.ID = nil + created, err = item.PropertyItem() + assert.NoError(t, err) + assert.False(t, created.ID().IsEmpty()) +} + +func TestInitializerItem_PropertyGroup(t *testing.T) { + item := &InitializerItem{ + ID: NewItemID().Ref(), + SchemaItem: SchemaGroupID("hoge"), + Fields: []*InitializerField{{ + Field: FieldID("name"), + Type: ValueTypeString, + Value: ValueTypeString.ValueFrom("aaa"), + }}, + } + + expected := NewItem().ID(*item.ID).SchemaGroup(item.SchemaItem).Group().Fields([]*Field{ + NewField(item.Fields[0].Field). + Value(NewOptionalValue(item.Fields[0].Type, item.Fields[0].Value)). 
+ MustBuild(), + }).MustBuild() + + assert.Equal(t, expected, item.PropertyGroup()) + + // check if a new id is generated + item.ID = nil + assert.False(t, item.PropertyGroup().ID().IsEmpty()) +} + +func TestInitializerItem_PropertyGroupList(t *testing.T) { + item := &InitializerItem{ + ID: NewItemID().Ref(), + SchemaItem: SchemaGroupID("hoge"), + Groups: []*InitializerGroup{{ + ID: NewItemID().Ref(), + }}, + } + + expected := NewItem().ID(*item.ID).SchemaGroup(item.SchemaItem).GroupList().Groups([]*Group{ + NewItem().ID(*item.Groups[0].ID).SchemaGroup(item.SchemaItem).Group().MustBuild(), + }).MustBuild() + + assert.Equal(t, expected, item.PropertyGroupList()) + + // check if a new id is generated + item.ID = nil + assert.False(t, item.PropertyGroupList().ID().IsEmpty()) +} + +func TestInitializerGroup_Clone(t *testing.T) { + item := &InitializerGroup{ + ID: NewItemID().Ref(), + Fields: []*InitializerField{{ + Field: FieldID("name"), + Type: ValueTypeString, + Value: ValueTypeString.ValueFrom("aaa"), + Links: []*InitializerLink{{ + Dataset: NewDatasetID().Ref(), + Schema: NewDatasetSchemaID(), + Field: NewDatasetFieldID(), + }}, + }}, + } + + cloned := item.Clone() + + assert.NotSame(t, cloned, item) + assert.NotSame(t, cloned.Fields, item.Fields) + assert.NotSame(t, cloned.Fields[0], item.Fields[0]) + assert.Equal(t, cloned, item) +} + +func TestInitializerGroup_PropertyGroup(t *testing.T) { + parentItem := SchemaGroupID("hoge") + item := &InitializerGroup{ + ID: NewItemID().Ref(), + Fields: []*InitializerField{{ + Field: FieldID("name"), + Type: ValueTypeString, + Value: ValueTypeString.ValueFrom("aaa"), + }}, + } + + expected := NewItem().ID(*item.ID).SchemaGroup(parentItem).Group().Fields([]*Field{ + NewField(item.Fields[0].Field). + Value(NewOptionalValue(item.Fields[0].Type, item.Fields[0].Value)). 
+ MustBuild(), + }).MustBuild() + + p, err := item.PropertyGroup(parentItem) + assert.NoError(t, err) + assert.Equal(t, expected, p) + + // check if a new id is generated + item.ID = nil + p, err = item.PropertyGroup(parentItem) + assert.NoError(t, err) + assert.False(t, p.ID().IsEmpty()) +} + +func TestInitializerField_Clone(t *testing.T) { + field := &InitializerField{ + Field: FieldID("name"), + Type: ValueTypeString, + Value: ValueTypeString.ValueFrom("aaa"), + Links: []*InitializerLink{{ + Dataset: NewDatasetID().Ref(), + Schema: NewDatasetSchemaID(), + Field: NewDatasetFieldID(), + }}, + } + cloned := field.Clone() + + assert.NotSame(t, cloned, field) + assert.NotSame(t, cloned.Links, field.Links) + assert.Equal(t, cloned, field) +} + +func TestInitializerField_PropertyField(t *testing.T) { + field := &InitializerField{ + Field: FieldID("name"), + Type: ValueTypeString, + Value: ValueTypeString.ValueFrom("aaa"), + Links: []*InitializerLink{{ + Dataset: NewDatasetID().Ref(), + Schema: NewDatasetSchemaID(), + Field: NewDatasetFieldID(), + }}, + } + + expected := NewField(field.Field). + Value(NewOptionalValue(field.Type, field.Value)). + Links(NewLinks([]*Link{NewLink(*field.Links[0].Dataset.CopyRef(), field.Links[0].Schema, field.Links[0].Field)})). 
+ MustBuild() + + assert.Equal(t, expected, field.PropertyField()) +} + +func TestInitializerLink_Clone(t *testing.T) { + link := &InitializerLink{ + Dataset: NewDatasetID().Ref(), + Schema: NewDatasetSchemaID(), + Field: NewDatasetFieldID(), + } + cloned := link.Clone() + + assert.NotSame(t, cloned, link) + assert.Equal(t, cloned, link) +} + +func TestInitializerLink_PropertyLink(t *testing.T) { + link := &InitializerLink{ + Dataset: NewDatasetID().Ref(), + Schema: NewDatasetSchemaID(), + Field: NewDatasetFieldID(), + } + + expected := NewLink(*link.Dataset.CopyRef(), link.Schema, link.Field) + + assert.Equal(t, expected, link.PropertyLink()) +} diff --git a/server/pkg/property/item.go b/server/pkg/property/item.go new file mode 100644 index 000000000..377d52ceb --- /dev/null +++ b/server/pkg/property/item.go @@ -0,0 +1,66 @@ +package property + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/dataset" +) + +type Item interface { + ID() ItemID + IDRef() *ItemID + SchemaGroup() SchemaGroupID + SchemaGroupRef() *SchemaGroupID + HasLinkedField() bool + Datasets() []DatasetID + FieldsByLinkedDataset(DatasetSchemaID, DatasetID) []*Field + IsDatasetLinked(DatasetSchemaID, DatasetID) bool + IsEmpty() bool + Prune() bool + MigrateSchema(context.Context, *Schema, dataset.Loader) + MigrateDataset(DatasetMigrationParam) + ValidateSchema(*SchemaGroup) error + Fields(*Pointer) []*Field + RemoveFields(*Pointer) bool + CloneItem() Item + GroupAndFields(*Pointer) []GroupAndField + GuessSchema() *SchemaGroup +} + +type itemBase struct { + ID ItemID + SchemaGroup SchemaGroupID +} + +func ToGroup(i Item) *Group { + g, _ := i.(*Group) + return g +} + +func ToGroupList(i Item) *GroupList { + g, _ := i.(*GroupList) + return g +} + +func InitItemFrom(psg *SchemaGroup) Item { + if psg == nil { + return nil + } + if psg.IsList() { + return InitGroupListFrom(psg) + } + return InitGroupFrom(psg) +} + +type GroupAndField struct { + ParentGroup *GroupList + Group *Group + 
Field *Field +} + +func (f GroupAndField) SchemaFieldPointer() SchemaFieldPointer { + return SchemaFieldPointer{ + SchemaGroup: f.Group.SchemaGroup(), + Field: f.Field.Field(), + } +} diff --git a/server/pkg/property/item_builder.go b/server/pkg/property/item_builder.go new file mode 100644 index 000000000..64bddc98c --- /dev/null +++ b/server/pkg/property/item_builder.go @@ -0,0 +1,32 @@ +package property + +type ItemBuilder struct { + base itemBase +} + +func NewItem() *ItemBuilder { + return &ItemBuilder{} +} + +func (b *ItemBuilder) Group() *GroupBuilder { + return NewGroup().base(b.base) +} + +func (b *ItemBuilder) GroupList() *GroupListBuilder { + return NewGroupList().base(b.base) +} + +func (b *ItemBuilder) ID(id ItemID) *ItemBuilder { + b.base.ID = id + return b +} + +func (b *ItemBuilder) NewID() *ItemBuilder { + b.base.ID = NewItemID() + return b +} + +func (b *ItemBuilder) SchemaGroup(g SchemaGroupID) *ItemBuilder { + b.base.SchemaGroup = g + return b +} diff --git a/server/pkg/property/item_test.go b/server/pkg/property/item_test.go new file mode 100644 index 000000000..fd4d8b52a --- /dev/null +++ b/server/pkg/property/item_test.go @@ -0,0 +1,119 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestInitItemFrom(t *testing.T) { + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID("aa").Fields([]*SchemaField{sf}).MustBuild() + sgl := NewSchemaGroup().ID("aa").IsList(true).Fields([]*SchemaField{sf}).MustBuild() + iid := NewItemID() + propertySchemaField1ID := SchemaGroupID("aa") + + tests := []struct { + Name string + SG *SchemaGroup + Expected Item + }{ + { + Name: "nil psg", + }, + { + Name: "init item from group", + SG: sg, + Expected: NewGroup().ID(iid).SchemaGroup(propertySchemaField1ID).MustBuild(), + }, + { + Name: "init item from group list", + SG: sgl, + Expected: NewGroupList().ID(iid).SchemaGroup(propertySchemaField1ID).MustBuild(), + }, + } + + for _, tt 
:= range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := InitItemFrom(tt.SG) + if res != nil { + assert.Equal(t, tt.Expected.SchemaGroup(), res.SchemaGroup()) + } else { + assert.Nil(t, tt.Expected) + } + }) + } +} + +func TestToGroup(t *testing.T) { + iid := NewItemID() + propertySchemaID := MustSchemaID("xxx~1.1.1/aa") + propertySchemaField1ID := FieldID("a") + propertySchemaGroup1ID := SchemaGroupID("A") + il := []Item{ + NewGroup().ID(iid).SchemaGroup(propertySchemaGroup1ID). + Fields([]*Field{ + NewField(propertySchemaField1ID). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). + MustBuild(), + }).MustBuild(), + } + p := New().NewID().Scene(NewSceneID()).Items(il).Schema(propertySchemaID).MustBuild() + g := ToGroup(p.ItemBySchema(propertySchemaGroup1ID)) + assert.Equal(t, propertySchemaGroup1ID, g.SchemaGroup()) + assert.Equal(t, iid, g.ID()) +} + +func TestToGroupList(t *testing.T) { + iid := NewItemID() + propertySchemaID := MustSchemaID("xxx~1.1.1/aa") + propertySchemaGroup1ID := SchemaGroupID("A") + il := []Item{ + NewGroupList().ID(iid).SchemaGroup(propertySchemaGroup1ID).MustBuild(), + } + p := New().NewID().Scene(NewSceneID()).Items(il).Schema(propertySchemaID).MustBuild() + g := ToGroupList(p.ItemBySchema(propertySchemaGroup1ID)) + assert.Equal(t, propertySchemaGroup1ID, g.SchemaGroup()) + assert.Equal(t, iid, g.ID()) +} + +func TestGroupAndField_SchemaFieldPointer(t *testing.T) { + tests := []struct { + name string + target GroupAndField + want SchemaFieldPointer + }{ + { + name: "group", + target: GroupAndField{ + ParentGroup: nil, + Group: testGroup1, + Field: testField1, + }, + want: SchemaFieldPointer{ + SchemaGroup: testGroup1.SchemaGroup(), + Field: testField1.Field(), + }, + }, + { + name: "group list", + target: GroupAndField{ + ParentGroup: testGroupList1, + Group: testGroup2, + Field: testField2, + }, + want: SchemaFieldPointer{ + SchemaGroup: testGroup2.SchemaGroup(), + Field: testField2.Field(), 
+ }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.SchemaFieldPointer()) + }) + } +} diff --git a/server/pkg/property/link.go b/server/pkg/property/link.go new file mode 100644 index 000000000..86f24901a --- /dev/null +++ b/server/pkg/property/link.go @@ -0,0 +1,406 @@ +package property + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/dataset" +) + +type Links struct { + links []*Link +} + +type Link struct { + dataset *DatasetID + schema *DatasetSchemaID + field *DatasetFieldID +} + +func NewLinks(links []*Link) *Links { + if links == nil { + return nil + } + links2 := make([]*Link, 0, len(links)) + for _, l := range links { + links2 = append(links2, l.Clone()) + } + return &Links{ + links: links2, + } +} + +func (l *Links) Clone() *Links { + if l == nil { + return nil + } + return &Links{ + links: append([]*Link{}, l.links...), + } +} + +func (l *Links) IsLinked() bool { + return l != nil && l.links != nil && len(l.links) > 0 +} + +func (l *Links) IsLinkedFully() bool { + return l != nil && l.links != nil && len(l.links) > 0 && len(l.DatasetIDs()) == len(l.links) +} + +func (l *Links) Len() int { + if l == nil || l.links == nil { + return 0 + } + return len(l.links) +} + +func (l *Links) First() *Link { + if l == nil || l.links == nil || len(l.links) == 0 { + return nil + } + return l.links[0] +} + +func (l *Links) Last() *Link { + if l == nil || l.links == nil || len(l.links) == 0 { + return nil + } + return l.links[len(l.links)-1] +} + +func (l *Links) LastValue(ds *dataset.Dataset) *dataset.Value { + return l.Last().Value(ds) +} + +func (l *Links) Validate(dsm dataset.SchemaMap, dm dataset.Map) bool { + if l == nil || l.links == nil { + return false + } + firstDatasetSchema := l.First().DatasetSchema() + if firstDatasetSchema == nil { + return false + } + fields := l.DatasetSchemaFieldIDs() + if fields == nil || len(fields) != len(l.links) { + return false + } + 
firstDataset := l.First().Dataset() + + res, resf := dsm.GraphSearchByFields(*firstDatasetSchema, fields...) + if len(res) != len(l.links) || resf == nil { + return false + } + + if firstDataset != nil { + res2, resf2 := dm.GraphSearchByFields(*firstDataset, fields...) + return len(res2) == len(l.links) && resf2 != nil + } + + return true +} + +func (l *Links) Replace( + dsm map[DatasetSchemaID]DatasetSchemaID, + dm map[DatasetID]DatasetID, + fm map[DatasetFieldID]DatasetFieldID, +) { + if l == nil || l.links == nil { + return + } + + links := make([]*Link, 0, len(l.links)) + + for _, link := range l.links { + nl := &Link{} + + if link.schema != nil { + if nds, ok := dsm[*link.schema]; ok { + nid := nds + nl.schema = &nid + } else { + // Datasetใฏๅ…จใฆIDใŒๅ†ๅ‰ฒใ‚Šๅฝ“ใฆใ•ใ‚Œใ‚‹ใŸใ‚ใ€ใƒชใƒณใ‚ฏใŒ้€”ๅˆ‡ใ‚Œใฆใ„ใ‚‹ใ“ใจใซใชใ‚‹ + // ใ‚ˆใฃใฆใƒชใƒณใ‚ฏ่‡ชไฝ“ใŒ็„กๅŠนใซใชใ‚‹ + l.links = nil + return + } + } + + if link.dataset != nil { + if nds, ok := dm[*link.dataset]; ok { + nid := nds + nl.dataset = &nid + } else { + // Datasetใฏๅ…จใฆIDใŒๅ†ๅ‰ฒใ‚Šๅฝ“ใฆใ•ใ‚Œใ‚‹ใŸใ‚ใ€ใƒชใƒณใ‚ฏใŒ้€”ๅˆ‡ใ‚Œใฆใ„ใ‚‹ใ“ใจใซใชใ‚‹ + // ใ‚ˆใฃใฆใƒชใƒณใ‚ฏ่‡ชไฝ“ใŒ็„กๅŠนใซใชใ‚‹ + l.links = nil + return + } + } + + if link.field != nil { + if nf, ok := fm[*link.field]; ok { + nid := nf + nl.field = &nid + } else { + // Datasetใฏๅ…จใฆIDใŒๅ†ๅ‰ฒใ‚Šๅฝ“ใฆใ•ใ‚Œใ‚‹ใŸใ‚ใ€ใƒชใƒณใ‚ฏใŒ้€”ๅˆ‡ใ‚Œใฆใ„ใ‚‹ใ“ใจใซใชใ‚‹ + // ใ‚ˆใฃใฆใƒชใƒณใ‚ฏ่‡ชไฝ“ใŒ็„กๅŠนใซใชใ‚‹ + l.links = nil + return + } + } + + links = append(links, nl) + } + + l.links = links +} + +func (l *Links) Links() []*Link { + if l == nil || l.links == nil || len(l.links) == 0 { + return nil + } + links2 := make([]*Link, 0, len(l.links)) + for _, l := range l.links { + links2 = append(links2, l.Clone()) + } + return links2 +} + +func (l *Links) DatasetIDs() []DatasetID { + if l == nil { + return nil + } + datasets := make([]DatasetID, 0, len(l.links)) + for _, i := range l.links { + if i.dataset != nil { 
+ datasets = append(datasets, *i.dataset) + } else { + return datasets + } + } + return datasets +} + +func (l *Links) DatasetSchemaIDs() []DatasetSchemaID { + if l == nil { + return nil + } + schemas := make([]DatasetSchemaID, 0, len(l.links)) + for _, i := range l.links { + if i.schema != nil { + schemas = append(schemas, *i.schema) + } else { + return schemas + } + } + return schemas +} + +func (l *Links) HasSchemaAndDataset(s DatasetSchemaID, dsid DatasetID) bool { + if l == nil { + return false + } + for _, id := range l.DatasetSchemaIDs() { + if id == s { + return true + } + } + for _, id := range l.DatasetIDs() { + if id == dsid { + return true + } + } + return false +} + +func (l *Links) DatasetSchemaFieldIDs() []DatasetFieldID { + if l == nil { + return nil + } + fields := make([]DatasetFieldID, 0, len(l.links)) + for _, i := range l.links { + if i.field != nil { + fields = append(fields, *i.field) + } else { + return fields + } + } + return fields +} + +func (l *Links) HasDataset(did DatasetID) bool { + if l == nil { + return false + } + for _, l2 := range l.links { + if l2 != nil && l2.dataset != nil && *l2.dataset == did { + return true + } + } + return false +} + +func (l *Links) HasDatasetSchema(dsid DatasetSchemaID) bool { + if l == nil { + return false + } + for _, l2 := range l.links { + if l2 != nil && l2.schema != nil && *l2.schema == dsid { + return true + } + } + return false +} + +func (l *Links) HasDatasetSchemaAndDataset(dsid DatasetSchemaID, did DatasetID) bool { + if l == nil { + return false + } + for _, l2 := range l.links { + if l2 != nil && (l2.schema != nil && *l2.schema == dsid || l2.dataset != nil && *l2.dataset == did) { + return true + } + } + return false +} + +func NewLink(d DatasetID, ds DatasetSchemaID, f DatasetFieldID) *Link { + dataset := d + schema := ds + field := f + return &Link{ + dataset: &dataset, + schema: &schema, + field: &field, + } +} + +func NewLinkFieldOnly(ds DatasetSchemaID, f DatasetFieldID) *Link { + 
schema := ds + field := f + return &Link{ + schema: &schema, + field: &field, + } +} + +func (l *Link) Dataset() *DatasetID { + if l == nil { + return nil + } + return l.dataset.CopyRef() +} + +func (l *Link) DatasetSchema() *DatasetSchemaID { + if l == nil { + return nil + } + return l.schema.CopyRef() +} + +func (l *Link) DatasetSchemaField() *DatasetFieldID { + if l == nil { + return nil + } + return l.field.CopyRef() +} + +func (l *Link) Value(ds *dataset.Dataset) *dataset.Value { + if l == nil || ds == nil || l.dataset == nil || l.field == nil || ds.ID() != *l.dataset { + return nil + } + f := ds.Field(*l.field) + if f == nil { + return nil + } + return f.Value() +} + +func (l *Link) Validate(dss *dataset.Schema, ds *dataset.Dataset) bool { + if l == nil || l.field == nil || l.schema == nil || dss == nil { + return false + } + + // DS + if dss.ID() != *l.schema { + return false + } + if f := dss.Field(*l.field); f == nil { + return false + } + + // D + if l.dataset != nil { + if ds == nil || ds.ID() != *l.dataset || ds.Schema() != dss.ID() { + return false + } + if f := ds.Field(*l.field); f == nil { + return false + } + } + + return true +} + +func (l *Links) IsEmpty() bool { + return l == nil || l.links == nil || len(l.links) == 0 +} + +func (l *Link) Clone() *Link { + if l == nil { + return nil + } + return &Link{ + dataset: l.Dataset(), + schema: l.DatasetSchema(), + field: l.DatasetSchemaField(), + } +} + +func (l *Link) ApplyDataset(ds *DatasetID) *Link { + if l == nil { + return nil + } + // if dataset is already set, it will not be overriden + if ds == nil || l.Dataset() != nil { + return l.Clone() + } + return &Link{ + dataset: ds.CopyRef(), + schema: l.DatasetSchema(), + field: l.DatasetSchemaField(), + } +} + +func (l *Links) ApplyDataset(ds *DatasetID) *Links { + if l == nil || l.links == nil || len(l.links) == 0 { + return nil + } + + links := l.Clone() + first := links.First() + // if dataset is already set, it will not be overriden + if ds == 
nil || first.Dataset() != nil { + return links + } + + links.links[0] = first.ApplyDataset(ds) + return links +} + +func (l *Links) DatasetValue(ctx context.Context, d dataset.GraphLoader) (*dataset.Value, error) { + if l == nil || d == nil { + return nil, nil + } + dsid := l.First().Dataset() + dsfid := l.DatasetSchemaFieldIDs() + if dsid != nil && dsfid != nil { + _, dsf, err := d(ctx, *dsid, dsfid...) + if err != nil { + return nil, err + } + if dsf != nil { + return dsf.Value(), nil + } + } + return nil, nil +} diff --git a/server/pkg/property/link_test.go b/server/pkg/property/link_test.go new file mode 100644 index 000000000..b64a97824 --- /dev/null +++ b/server/pkg/property/link_test.go @@ -0,0 +1,490 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/dataset" + + "github.com/stretchr/testify/assert" +) + +func TestNewLinks(t *testing.T) { + dsid1 := NewDatasetSchemaID() + dsid2 := NewDatasetSchemaID() + did1 := NewDatasetID() + did2 := NewDatasetID() + dfid1 := NewDatasetFieldID() + dfid2 := NewDatasetFieldID() + + var lin *Links + assert.Nil(t, lin) + assert.Nil(t, lin.Clone()) + assert.Nil(t, lin.Links()) + assert.Nil(t, lin.DatasetIDs()) + assert.Nil(t, lin.DatasetSchemaIDs()) + assert.False(t, lin.IsLinked()) + assert.Equal(t, 0, lin.Len()) + + lin = NewLinks([]*Link{}) + assert.Equal(t, []DatasetID{}, lin.DatasetIDs()) + assert.Equal(t, []DatasetSchemaID{}, lin.DatasetSchemaIDs()) + assert.Equal(t, []DatasetFieldID{}, lin.DatasetSchemaFieldIDs()) + + ll := []*Link{ + NewLink(did1, dsid1, dfid1), + NewLink(did2, dsid2, dfid2), + } + dl := []DatasetID{did1, did2} + dsl := []DatasetSchemaID{dsid1, dsid2} + dsfl := []DatasetFieldID{dfid1, dfid2} + lin = NewLinks(ll) + assert.NotNil(t, lin) + assert.Equal(t, ll, lin.Links()) + assert.Equal(t, ll, lin.Clone().Links()) + assert.Equal(t, dl, lin.DatasetIDs()) + assert.Equal(t, dsl, lin.DatasetSchemaIDs()) + assert.Equal(t, dsfl, lin.DatasetSchemaFieldIDs()) + 
assert.True(t, lin.IsLinked()) + assert.Equal(t, 2, lin.Len()) +} + +func TestLinks_HasSchemaAndDataset(t *testing.T) { + dsid1 := NewDatasetSchemaID() + dsid2 := NewDatasetSchemaID() + did1 := NewDatasetID() + did2 := NewDatasetID() + dfid1 := NewDatasetFieldID() + ll := []*Link{ + NewLink(did1, dsid1, dfid1), + } + + tests := []struct { + Name string + DSS DatasetSchemaID + DS DatasetID + Links *Links + Expected bool + }{ + { + Name: "nil links", + Expected: false, + }, + { + Name: "true", + DSS: dsid1, + DS: did1, + Links: NewLinks(ll), + Expected: true, + }, + { + Name: "false", + DSS: dsid2, + DS: did2, + Links: NewLinks(ll), + Expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.Links.HasSchemaAndDataset(tc.DSS, tc.DS) + res2 := tc.Links.HasDataset(tc.DS) + res3 := tc.Links.HasDatasetSchema(tc.DSS) + assert.Equal(t, tc.Expected, res) + assert.Equal(t, tc.Expected, res2) + assert.Equal(t, tc.Expected, res3) + }) + } +} + +func TestLinks_Validate(t *testing.T) { + dsid1 := NewDatasetSchemaID() + did1 := NewDatasetID() + dfid1 := NewDatasetFieldID() + + tests := []struct { + Name string + DSM dataset.SchemaMap + DM dataset.Map + Links *Links + Expected bool + }{ + { + Name: "nil links", + Expected: false, + }, + { + Name: "nil dataset schema for first link", + Links: NewLinks([]*Link{}), + Expected: false, + }, + { + Name: "len(res) != len(l.links)", + Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + Expected: false, + }, + { + Name: "success", + DSM: dataset.SchemaMap{ + dsid1: dataset.NewSchema().ID(dsid1).Fields([]*dataset.SchemaField{ + dataset.NewSchemaField(). + ID(dfid1). + Ref(dsid1.Ref()).Type(dataset.ValueTypeString). 
+ MustBuild(), + }).MustBuild(), + }, + DM: dataset.Map{ + did1: dataset.New().ID(did1).Schema(dsid1).Fields([]*dataset.Field{ + dataset.NewField(dfid1, dataset.ValueTypeString.ValueFrom("vvv"), ""), + }).MustBuild(), + }, + Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + Expected: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.Links.Validate(tc.DSM, tc.DM) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestLinks_Replace(t *testing.T) { + dsid1 := NewDatasetSchemaID() + dsid2 := NewDatasetSchemaID() + did1 := NewDatasetID() + did2 := NewDatasetID() + dfid1 := NewDatasetFieldID() + dfid2 := NewDatasetFieldID() + + tests := []struct { + Name string + DSM map[DatasetSchemaID]DatasetSchemaID + DM map[DatasetID]DatasetID + FM map[DatasetFieldID]DatasetFieldID + Expected, Links *Links + }{ + { + Name: "nil links", + }, + { + Name: "success", + DSM: map[DatasetSchemaID]DatasetSchemaID{ + dsid1: dsid2, + }, + DM: map[DatasetID]DatasetID{ + did1: did2, + }, + FM: map[DatasetFieldID]DatasetFieldID{ + dfid1: dfid2, + }, + Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + Expected: NewLinks([]*Link{NewLink(did2, dsid2, dfid2)}), + }, + { + Name: "dataset = nil", + DSM: map[DatasetSchemaID]DatasetSchemaID{ + dsid1: dsid2, + }, + DM: map[DatasetID]DatasetID{}, + FM: map[DatasetFieldID]DatasetFieldID{ + dfid1: dfid2, + }, + Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + }, + { + Name: "datasetschema = nil", + DSM: map[DatasetSchemaID]DatasetSchemaID{}, + DM: map[DatasetID]DatasetID{ + did1: did2, + }, + FM: map[DatasetFieldID]DatasetFieldID{ + dfid1: dfid2, + }, + Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + }, + { + Name: "dataset schema field = nil", + DSM: map[DatasetSchemaID]DatasetSchemaID{ + dsid1: dsid2, + }, + DM: map[DatasetID]DatasetID{ + did1: did2, + }, + FM: map[DatasetFieldID]DatasetFieldID{}, + Links: NewLinks([]*Link{NewLink(did1, dsid1, 
dfid1)}), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + tc.Links.Replace(tc.DSM, tc.DM, tc.FM) + assert.Equal(t, tc.Expected.Links(), tc.Links.Links()) + }) + } +} + +func TestLinks_ApplyDataset(t *testing.T) { + dsid1 := NewDatasetSchemaID() + did1 := NewDatasetID() + dfid1 := NewDatasetFieldID() + + tests := []struct { + Name string + Input *DatasetID + Expected, Links *Links + }{ + { + Name: "nil links", + }, + { + Name: "nil input dataset", + Links: NewLinks([]*Link{NewLinkFieldOnly(dsid1, dfid1)}), + Expected: NewLinks([]*Link{NewLinkFieldOnly(dsid1, dfid1)}), + }, + { + Name: "not nil dataset", + Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + Expected: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + }, + { + Name: "apply new dataset", + Input: did1.Ref(), + Links: NewLinks([]*Link{NewLinkFieldOnly(dsid1, dfid1)}), + Expected: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.Links.ApplyDataset(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestLink_Dataset(t *testing.T) { + dsid1 := NewDatasetSchemaID() + did1 := NewDatasetID() + dfid1 := NewDatasetFieldID() + + tests := []struct { + Name string + Link *Link + Expected *DatasetID + }{ + { + Name: "nil link", + }, + { + Name: "nil dataset", + Link: NewLinkFieldOnly(dsid1, dfid1), + }, + { + Name: "success", + Link: NewLink(did1, dsid1, dfid1), + Expected: did1.Ref(), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + res := tc.Link.Dataset() + assert.Equal(t, tc.Expected, res) + }) + } + +} + +func TestLink_DatasetSchema(t *testing.T) { + dsid1 := NewDatasetSchemaID() + did1 := NewDatasetID() + dfid1 := NewDatasetFieldID() + + tests := []struct { + Name string + Link *Link + Expected *DatasetSchemaID + }{ + { + Name: "nil link", + }, + { + Name: "success", + Link: 
NewLink(did1, dsid1, dfid1), + Expected: dsid1.Ref(), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + res := tc.Link.DatasetSchema() + assert.Equal(t, tc.Expected, res) + }) + } + +} + +func TestLink_DatasetSchemaField(t *testing.T) { + dsid1 := NewDatasetSchemaID() + did1 := NewDatasetID() + dfid1 := NewDatasetFieldID() + + tests := []struct { + Name string + Link *Link + Expected *DatasetFieldID + }{ + { + Name: "nil link", + }, + { + Name: "success", + Link: NewLink(did1, dsid1, dfid1), + Expected: dfid1.Ref(), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + res := tc.Link.DatasetSchemaField() + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestLink_Value(t *testing.T) { + dsid1 := NewDatasetSchemaID() + did1 := NewDatasetID() + dfid1 := NewDatasetFieldID() + dsf := []*dataset.Field{ + dataset.NewField(dfid1, dataset.ValueTypeString.ValueFrom("aaa"), ""), + } + + tests := []struct { + Name string + Link *Link + Input *dataset.Dataset + Expected *dataset.Value + }{ + { + Name: "nil link", + }, + { + Name: "success", + Link: NewLink(did1, dsid1, dfid1), + Input: dataset.New().ID(did1).Schema(dsid1).Fields([]*dataset.Field{}).MustBuild(), + }, + { + Name: "success", + Link: NewLink(did1, dsid1, dfid1), + Input: dataset.New().ID(did1).Schema(dsid1).Fields(dsf).MustBuild(), + Expected: dataset.ValueTypeString.ValueFrom("aaa"), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + res := tc.Link.Value(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestLink_Validate(t *testing.T) { + dsid1 := NewDatasetSchemaID() + did1 := NewDatasetID() + dfid1 := NewDatasetFieldID() + + tests := []struct { + Name string + DS *dataset.Dataset + DSS *dataset.Schema + Link *Link + Expected bool + }{ + { + Name: "nil links", + Expected: false, + }, + { + Name: "input schema id != link schema", + DS: 
dataset.New().ID(did1).Schema(dsid1).Fields([]*dataset.Field{ + dataset.NewField(dfid1, dataset.ValueTypeString.ValueFrom("vvv"), "")}).MustBuild(), + DSS: dataset.NewSchema().NewID().Fields([]*dataset.SchemaField{ + dataset.NewSchemaField(). + ID(dfid1). + Ref(dsid1.Ref()).Type(dataset.ValueTypeString). + MustBuild(), + }).MustBuild(), + Link: NewLink(did1, dsid1, dfid1), + Expected: false, + }, + { + Name: "nil input dataset", + DSS: dataset.NewSchema().ID(dsid1).Fields([]*dataset.SchemaField{ + dataset.NewSchemaField(). + ID(dfid1). + Ref(dsid1.Ref()).Type(dataset.ValueTypeString). + MustBuild(), + }).MustBuild(), + Link: NewLink(did1, dsid1, dfid1), + Expected: false, + }, + { + Name: "nil dataset field", + DS: dataset.New().ID(did1).Schema(dsid1).Fields([]*dataset.Field{}).MustBuild(), + DSS: dataset.NewSchema().ID(dsid1).Fields([]*dataset.SchemaField{ + dataset.NewSchemaField(). + ID(dfid1). + Ref(dsid1.Ref()).Type(dataset.ValueTypeString). + MustBuild(), + }).MustBuild(), + Link: NewLink(did1, dsid1, dfid1), + Expected: false, + }, + { + Name: "valid", + DS: dataset.New().ID(did1).Schema(dsid1).Fields([]*dataset.Field{ + dataset.NewField(dfid1, dataset.ValueTypeString.ValueFrom("vvv"), "")}).MustBuild(), + DSS: dataset.NewSchema().ID(dsid1).Fields([]*dataset.SchemaField{ + dataset.NewSchemaField(). + ID(dfid1). + Ref(dsid1.Ref()).Type(dataset.ValueTypeString). 
+ MustBuild(), + }).MustBuild(), + Link: NewLink(did1, dsid1, dfid1), + Expected: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.Link.Validate(tc.DSS, tc.DS) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestLink_Clone(t *testing.T) { + var l *Link + assert.Nil(t, l.Clone()) + l = NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) + assert.Equal(t, l, l.Clone()) +} diff --git a/server/pkg/property/list.go b/server/pkg/property/list.go new file mode 100644 index 000000000..af1cae0a0 --- /dev/null +++ b/server/pkg/property/list.go @@ -0,0 +1,114 @@ +package property + +import ( + "sort" + + "github.com/samber/lo" +) + +type List []*Property + +func (l List) IDs() []ID { + ids := make([]ID, 0, len(l)) + m := map[ID]struct{}{} + for _, p := range l { + s := p.ID() + if _, ok := m[s]; ok { + continue + } + ids = append(ids, s) + m[s] = struct{}{} + } + return ids +} + +func (l List) Schemas() []SchemaID { + schemas := make([]SchemaID, 0, len(l)) + for _, p := range l { + s := p.Schema() + skip := false + for _, ss := range schemas { + if ss.Equal(s) { + skip = true + break + } + } + if skip { + continue + } + schemas = append(schemas, s) + } + return schemas +} + +func (l List) Sort() { + sort.Slice(l, func(i, j int) bool { + return l[i].ID().Compare(l[j].ID()) < 0 + }) +} + +func (l List) Map() Map { + m := make(Map, len(l)) + return m.Add(l...) +} + +type Map map[ID]*Property + +func MapFrom(properties ...*Property) Map { + return Map{}.Add(properties...) 
+} + +func (m Map) Add(properties ...*Property) Map { + if m == nil { + m = Map{} + } + for _, p := range properties { + if p == nil { + continue + } + m[p.ID()] = p + } + return m +} + +func (m Map) List() List { + if m == nil { + return nil + } + list := make(List, 0, len(m)) + for _, l := range m { + list = append(list, l) + } + return list +} + +func (m Map) Clone() Map { + if m == nil { + return Map{} + } + m2 := make(Map, len(m)) + for k, v := range m { + m2[k] = v + } + return m2 +} + +func (m Map) Merge(m2 Map) Map { + if m == nil { + return m2.Clone() + } + m3 := m.Clone() + if m2 == nil { + return m3 + } + + return m3.Add(m2.List()...) +} + +func (m Map) Keys() []ID { + return IDList(lo.Keys(m)).Sort() +} + +func (m Map) Len() int { + return len(m) +} diff --git a/server/pkg/property/list_test.go b/server/pkg/property/list_test.go new file mode 100644 index 000000000..d2fe403b8 --- /dev/null +++ b/server/pkg/property/list_test.go @@ -0,0 +1,179 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +var ( + sf = NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg = NewSchemaGroup().ID("aa").Fields([]*SchemaField{sf}).MustBuild() + p = New().NewID().Scene(NewSceneID()).Schema(MustSchemaID("xx~1.0.0/aa")).Items([]Item{InitItemFrom(sg)}).MustBuild() + p2 = New().NewID().Scene(NewSceneID()).Schema(MustSchemaID("xx~1.0.0/aa")).Items([]Item{InitItemFrom(sg)}).MustBuild() +) + +func TestList_IDs(t *testing.T) { + p1 := NewID() + p2 := NewID() + + tests := []struct { + name string + target List + want []ID + }{ + { + name: "ok", + target: List{&Property{id: p1}, &Property{id: p2}, &Property{id: p1}}, + want: []ID{p1, p2}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.IDs()) + }) + } +} + +func TestList_Schemas(t *testing.T) { + ps1 := MustSchemaID("x~1.0.0/a") + ps2 := MustSchemaID("x~1.0.0/b") + + tests := []struct { + 
name string + target List + want []SchemaID + }{ + { + name: "ok", + target: List{&Property{schema: ps1}, &Property{schema: ps2}, &Property{schema: ps1}}, + want: []SchemaID{ps1, ps2}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Schemas()) + }) + } +} + +func TestList_Map(t *testing.T) { + p1 := NewID() + p2 := NewID() + + tests := []struct { + name string + target List + want Map + }{ + { + name: "ok", + target: List{&Property{id: p1}, &Property{id: p2}, &Property{id: p1}}, + want: Map{ + p1: &Property{id: p1}, + p2: &Property{id: p2}, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Map()) + }) + } +} + +func TestMap_Add(t *testing.T) { + tests := []struct { + Name string + Input *Property + M, Expected Map + }{ + { + Name: "nil map", + }, + { + Name: "add property list", + Input: p, + M: Map{}, + Expected: Map{p.ID(): p}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + tt.M.Add(tt.Input) + assert.Equal(t, tt.Expected, tt.M) + assert.Equal(t, tt.Expected.List(), tt.M.List()) + }) + } +} + +func TestMapFrom(t *testing.T) { + m := MapFrom(p) + e := Map{p.ID(): p} + assert.Equal(t, e, m) +} + +func TestMap_Clone(t *testing.T) { + tests := []struct { + Name string + M, Expected Map + }{ + { + Name: "nil map", + Expected: Map{}, + }, + { + Name: "add property list", + M: Map{p.ID(): p}, + Expected: Map{p.ID(): p}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.M.Clone() + assert.Equal(t, tt.Expected, res) + }) + } +} + +func TestMap_Merge(t *testing.T) { + tests := []struct { + Name string + M1, M2, Expected Map + }{ + { + Name: "nil map", + Expected: Map{}, + }, + { + Name: "add property list", + M1: Map{p.ID(): p}, + M2: Map{p2.ID(): p2}, + Expected: 
Map{p.ID(): p, p2.ID(): p2}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.M1.Merge(tt.M2) + assert.Equal(t, tt.Expected, res) + }) + } +} diff --git a/server/pkg/property/loader.go b/server/pkg/property/loader.go new file mode 100644 index 000000000..66e7923f2 --- /dev/null +++ b/server/pkg/property/loader.go @@ -0,0 +1,57 @@ +package property + +import ( + "context" +) + +type Loader func(context.Context, ...ID) (List, error) + +type SchemaLoader func(context.Context, ...SchemaID) (SchemaList, error) + +func LoaderFrom(data []*Property) Loader { + return func(ctx context.Context, ids ...ID) (List, error) { + res := make([]*Property, 0, len(ids)) + for _, i := range ids { + found := false + for _, d := range data { + if i == d.ID() { + res = append(res, d) + found = true + break + } + } + if !found { + res = append(res, nil) + } + } + return res, nil + } +} + +func LoaderFromMap(data map[ID]*Property) Loader { + return func(ctx context.Context, ids ...ID) (List, error) { + res := make([]*Property, 0, len(ids)) + for _, i := range ids { + if d, ok := data[i]; ok { + res = append(res, d) + } else { + res = append(res, nil) + } + } + return res, nil + } +} + +func SchemaLoaderFromMap(data map[SchemaID]*Schema) SchemaLoader { + return func(ctx context.Context, ids ...SchemaID) (SchemaList, error) { + res := make([]*Schema, 0, len(ids)) + for _, i := range ids { + if d, ok := data[i]; ok { + res = append(res, d) + } else { + res = append(res, nil) + } + } + return res, nil + } +} diff --git a/server/pkg/property/loader_test.go b/server/pkg/property/loader_test.go new file mode 100644 index 000000000..d29929721 --- /dev/null +++ b/server/pkg/property/loader_test.go @@ -0,0 +1,60 @@ +package property + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestLoaderFrom(t *testing.T) { + scene := NewSceneID() + ps := MustSchemaID("xxx~1.1.1/aa") + pid1 := NewID() + pid2 
:= NewID() + p1 := New().ID(pid1).Scene(scene).Schema(ps).MustBuild() + p2 := New().ID(pid2).Scene(scene).Schema(ps).MustBuild() + pl := LoaderFrom([]*Property{ + p1, + p2, + New().NewID().Scene(scene).Schema(ps).MustBuild(), + }) + res, err := pl(context.Background(), pid1, pid2) + + assert.Equal(t, List{p1, p2}, res) + assert.NoError(t, err) +} + +func TestLoaderFromMap(t *testing.T) { + scene := NewSceneID() + ps := MustSchemaID("xxx~1.1.1/aa") + pid1 := NewID() + pid2 := NewID() + pid3 := NewID() + p1 := New().ID(pid1).Scene(scene).Schema(ps).MustBuild() + p2 := New().ID(pid2).Scene(scene).Schema(ps).MustBuild() + + pl := LoaderFromMap(map[ID]*Property{ + pid1: p1, + pid2: p2, + }) + res, err := pl(context.Background(), pid1, pid3, pid2) + assert.Equal(t, List{p1, nil, p2}, res) + assert.NoError(t, err) +} + +func TestSchemaLoaderFromMap(t *testing.T) { + psid1 := MustSchemaID("xxx~1.1.1/aa") + psid2 := MustSchemaID("xxx~1.1.1/bb") + psid3 := MustSchemaID("xxx~1.1.1/cc") + ps1 := NewSchema().ID(psid1).MustBuild() + ps2 := NewSchema().ID(psid2).MustBuild() + + pl := SchemaLoaderFromMap(map[SchemaID]*Schema{ + psid1: ps1, + psid2: ps2, + }) + res, err := pl(context.Background(), psid1, psid3, psid2) + assert.Equal(t, SchemaList{ps1, nil, ps2}, res) + assert.NoError(t, err) +} diff --git a/server/pkg/property/merged.go b/server/pkg/property/merged.go new file mode 100644 index 000000000..25401e446 --- /dev/null +++ b/server/pkg/property/merged.go @@ -0,0 +1,300 @@ +package property + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/dataset" +) + +// Merged represents a merged property from two properties +type Merged struct { + Original *ID + Parent *ID + Schema SchemaID + LinkedDataset *DatasetID + Groups []*MergedGroup +} + +// MergedGroup represents a group of Merged +type MergedGroup struct { + Original *ItemID + Parent *ItemID + SchemaGroup SchemaGroupID + LinkedDataset *DatasetID + Groups []*MergedGroup + Fields []*MergedField +} + +// 
MergedField represents a field of Merged +type MergedField struct { + ID FieldID + Type ValueType + Value *Value + Links *Links + Overridden bool +} + +// Datasets returns associated dataset IDs +func (m *Merged) Datasets() []DatasetID { + if m == nil { + return nil + } + ids := []DatasetID{} + for _, g := range m.Groups { + ids = append(ids, g.Datasets()...) + } + return ids +} + +// Datasets returns associated dataset IDs +func (m *MergedGroup) Datasets() []DatasetID { + if m == nil { + return nil + } + ids := []DatasetID{} + for _, f := range m.Fields { + if f == nil { + continue + } + ids = append(ids, f.Links.DatasetIDs()...) + } + return ids +} + +type MergedMetadata struct { + Original *ID + Parent *ID + LinkedDataset *DatasetID +} + +// MergedMetadataFrom generates MergedMetadata from single property +func MergedMetadataFrom(p ID) MergedMetadata { + p2 := p + return MergedMetadata{ + Original: &p2, + } +} + +// Properties returns associated property IDs +func (m MergedMetadata) Properties() []ID { + ids := make([]ID, 0, 2) + if m.Original != nil { + ids = append(ids, *m.Original) + } + if m.Parent != nil { + ids = append(ids, *m.Parent) + } + return ids +} + +// Merge merges two properties +func (m MergedMetadata) Merge(o *Property, p *Property) *Merged { + if m.Original != nil && (o == nil || *m.Original != o.ID()) { + return nil + } + if m.Parent != nil && (p == nil || *m.Parent != p.ID()) { + return nil + } + return Merge(o, p, m.LinkedDataset) +} + +func (f *MergedField) DatasetValue(ctx context.Context, d dataset.GraphLoader) (*dataset.Value, error) { + if f == nil { + return nil, nil + } + return f.Links.DatasetValue(ctx, d) +} + +// Merge merges two properties +func Merge(o *Property, p *Property, linked *DatasetID) *Merged { + if o == nil && p == nil || o != nil && p != nil && !o.Schema().Equal(p.Schema()) { + return nil + } + + var schema SchemaID + if p != nil { + schema = p.Schema() + } else if o != nil { + schema = o.Schema() + } + + return 
&Merged{ + Original: o.IDRef(), + Parent: p.IDRef(), + Schema: schema, + Groups: mergeItems(o.Items(), p.Items(), linked.CopyRef()), + LinkedDataset: linked.CopyRef(), + } +} + +func mergeItems(i1, i2 []Item, linked *DatasetID) []*MergedGroup { + if i1 == nil && i2 == nil || len(i1) == 0 && len(i2) == 0 { + return nil + } + + consumed := map[ItemID]struct{}{} + groups := []*MergedGroup{} + + for _, item := range i1 { + sgid := item.SchemaGroup() + + var parentItem Item + for _, item2 := range i2 { + if item2.SchemaGroup() == sgid { + parentItem = item2 + consumed[item2.ID()] = struct{}{} + } + } + + if mg := mergeItem(item, parentItem, linked); mg != nil { + groups = append(groups, mg) + } + } + + for _, item := range i2 { + if _, ok := consumed[item.ID()]; ok { + continue + } + + if mg := mergeItem(nil, item, linked); mg != nil { + groups = append(groups, mg) + } + } + + return groups +} + +func groupList(o, p Item) (*GroupList, *GroupList) { + return ToGroupList(o), ToGroupList(p) +} + +func group(o, p Item) (*Group, *Group) { + return ToGroup(o), ToGroup(p) +} + +func mergeItem(o, p Item, linked *DatasetID) *MergedGroup { + if o == nil && p == nil || o != nil && p != nil && o.SchemaGroup() != p.SchemaGroup() { + return nil + } + + var mgroups []*MergedGroup + var mfields []*MergedField + + if og, pg := groupList(o, p); og != nil || pg != nil { + // List merging + var groups []*Group + // if original exists, original is used + if og != nil { + groups = og.Groups() + } else { + groups = pg.Groups() + } + mgroups = make([]*MergedGroup, 0, len(groups)) + for _, gg := range groups { + var mi *MergedGroup + if og != nil { + mi = mergeItem(gg, nil, linked) + } else { + mi = mergeItem(nil, gg, linked) + } + if mi != nil { + mgroups = append(mgroups, mi) + } + } + } else if og, pg := group(o, p); og != nil || pg != nil { + // Group merging + fieldKeys := allFields(og.FieldIDs(), pg.FieldIDs()) + mfields = make([]*MergedField, 0, len(fieldKeys)) + for _, k := range 
fieldKeys { + mf := mergeField(og.Field(k), pg.Field(k), linked) + if mf != nil { + mfields = append(mfields, mf) + } + } + } + + var oid, pid *ItemID + var sg SchemaGroupID + if o != nil { + oid = o.IDRef() + sg = o.SchemaGroup() + } + if p != nil { + pid = p.IDRef() + sg = p.SchemaGroup() + } + + return &MergedGroup{ + Original: oid, + Parent: pid, + SchemaGroup: sg, + Fields: mfields, + Groups: mgroups, + LinkedDataset: linked, + } +} + +func mergeField(original, parent *Field, linked *DatasetID) *MergedField { + if original == nil && parent == nil || original != nil && parent != nil && (original.Field() != parent.Field() || original.Type() != parent.Type()) { + return nil + } + + var t ValueType + if original != nil { + t = original.Type() + } else if parent != nil { + t = parent.Type() + } + + var fid FieldID + if original != nil { + fid = original.Field() + } else if parent != nil { + fid = parent.Field() + } + + var v *Value + overridden := false + + if original == nil && parent != nil { + // parent value is used + v = parent.Value().Clone() + } else if original != nil { + // overrided value is used + v = original.Value().Clone() + overridden = parent != nil + } + + var links *Links + if l := original.Links(); l != nil { + // original links are used but dataset is overrided + links = l.ApplyDataset(linked) + overridden = parent != nil + } else if l := parent.Links(); l != nil { + // parent links are used and dataset is overrided + links = l.ApplyDataset(linked) + } + + return &MergedField{ + ID: fid, + Value: v, + Type: t, + Links: links, + Overridden: overridden, + } +} + +func allFields(args ...[]FieldID) []FieldID { + consumedKeys := map[FieldID]struct{}{} + result := []FieldID{} + for _, fields := range args { + for _, f := range fields { + if _, ok := consumedKeys[f]; ok { + continue + } + consumedKeys[f] = struct{}{} + result = append(result, f) + } + } + return result +} diff --git a/server/pkg/property/merged_test.go 
b/server/pkg/property/merged_test.go new file mode 100644 index 000000000..50e7d9043 --- /dev/null +++ b/server/pkg/property/merged_test.go @@ -0,0 +1,429 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestMerge(t *testing.T) { + ds := NewDatasetSchemaID() + df := NewDatasetFieldID() + d := NewDatasetID() + d2 := NewDatasetID() + opid := NewID() + ppid := NewID() + psid := MustSchemaID("hoge~0.1.0/fff") + psid2 := MustSchemaID("hoge~0.1.0/aaa") + psgid1 := SchemaGroupID("group1") + psgid2 := SchemaGroupID("group2") + psgid3 := SchemaGroupID("group3") + psgid4 := SchemaGroupID("group4") + i1id := NewItemID() + i2id := NewItemID() + i3id := NewItemID() + i4id := NewItemID() + i5id := NewItemID() + i6id := NewItemID() + i7id := NewItemID() + i8id := NewItemID() + + fields1 := []*Field{ + NewField(FieldID("a")). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("a"))). + MustBuild(), + NewField(FieldID("b")). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("b"))). + MustBuild(), + NewField(FieldID("e")). + Value(NewOptionalValue(ValueTypeString, nil)). + Links(NewLinks([]*Link{NewLink(d2, ds, df)})). + MustBuild(), + NewField(FieldID("f")). + Value(NewOptionalValue(ValueTypeNumber, nil)). + MustBuild(), + } + + fields2 := []*Field{ + NewField(FieldID("a")). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("1"))). + MustBuild(), + NewField(FieldID("c")). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("2"))). + MustBuild(), + NewField(FieldID("d")). + Value(NewOptionalValue(ValueTypeString, nil)). + Links(NewLinks([]*Link{NewLinkFieldOnly(ds, df)})). + MustBuild(), + NewField(FieldID("f")). + Value(NewOptionalValue(ValueTypeString, nil)). 
+ MustBuild(), + } + + groups1 := []*Group{ + NewGroup().ID(i7id).SchemaGroup(psgid1).Fields(fields1).MustBuild(), + } + + groups2 := []*Group{ + NewGroup().ID(i8id).SchemaGroup(psgid1).Fields(fields2).MustBuild(), + } + + items1 := []Item{ + NewGroupList().ID(i1id).SchemaGroup(psgid1).Groups(groups1).MustBuild(), + NewGroup().ID(i2id).SchemaGroup(psgid2).Fields(fields1).MustBuild(), + NewGroup().ID(i3id).SchemaGroup(psgid3).Fields(fields1).MustBuild(), + } + + items2 := []Item{ + NewGroupList().ID(i4id).SchemaGroup(psgid1).Groups(groups2).MustBuild(), + NewGroup().ID(i5id).SchemaGroup(psgid2).Fields(fields2).MustBuild(), + NewGroup().ID(i6id).SchemaGroup(psgid4).Fields(fields2).MustBuild(), + } + + sid := NewSceneID() + op := New().ID(opid).Scene(sid).Schema(psid).Items(items1).MustBuild() + pp := New().NewID().Scene(sid).Schema(psid2).MustBuild() + pp2 := New().ID(ppid).Scene(sid).Schema(psid).Items(items2).MustBuild() + + // Merge(op, pp2, &d) + expected1 := &Merged{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + Original: &i1id, + Parent: &i4id, + SchemaGroup: psgid1, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + Original: &i7id, + Parent: nil, + SchemaGroup: psgid1, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: FieldID("a"), + Value: ValueTypeString.ValueFrom("a"), + Type: ValueTypeString, + }, + { + ID: FieldID("b"), + Value: ValueTypeString.ValueFrom("b"), + Type: ValueTypeString, + }, + { + ID: FieldID("e"), + Links: NewLinks([]*Link{NewLink(d2, ds, df)}), + Type: ValueTypeString, + }, + { + ID: FieldID("f"), + Type: ValueTypeNumber, + }, + }, + }, + }, + }, + { + Original: &i2id, + Parent: &i5id, + SchemaGroup: psgid2, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: FieldID("a"), + Value: ValueTypeString.ValueFrom("a"), + Type: ValueTypeString, + Overridden: true, + }, + { + ID: FieldID("b"), + Value: ValueTypeString.ValueFrom("b"), + Type: ValueTypeString, + }, 
+ { + ID: FieldID("e"), + Links: NewLinks([]*Link{NewLink(d2, ds, df)}), + Type: ValueTypeString, + }, + { + ID: FieldID("c"), + Value: ValueTypeString.ValueFrom("2"), + Type: ValueTypeString, + }, + { + ID: FieldID("d"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + }, + }, + { + Original: &i3id, + Parent: nil, + SchemaGroup: psgid3, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: FieldID("a"), + Value: ValueTypeString.ValueFrom("a"), + Type: ValueTypeString, + }, + { + ID: FieldID("b"), + Value: ValueTypeString.ValueFrom("b"), + Type: ValueTypeString, + }, + { + ID: FieldID("e"), + Links: NewLinks([]*Link{NewLink(d2, ds, df)}), + Type: ValueTypeString, + }, + { + ID: FieldID("f"), + Type: ValueTypeNumber, + }, + }, + }, + { + Original: nil, + Parent: &i6id, + SchemaGroup: psgid4, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: FieldID("a"), + Value: ValueTypeString.ValueFrom("1"), + Type: ValueTypeString, + }, + { + ID: FieldID("c"), + Value: ValueTypeString.ValueFrom("2"), + Type: ValueTypeString, + }, + { + ID: FieldID("d"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + { + ID: FieldID("f"), + Type: ValueTypeString, + }, + }, + }, + }, + } + + // Merge(op, nil, &d) + expected2 := &Merged{ + Original: opid.Ref(), + Parent: nil, + Schema: psid, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + Original: &i1id, + Parent: nil, + SchemaGroup: psgid1, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + Original: &i7id, + Parent: nil, + SchemaGroup: psgid1, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: FieldID("a"), + Value: ValueTypeString.ValueFrom("a"), + Type: ValueTypeString, + }, + { + ID: FieldID("b"), + Value: ValueTypeString.ValueFrom("b"), + Type: ValueTypeString, + }, + { + ID: FieldID("e"), + Links: NewLinks([]*Link{NewLink(d2, ds, df)}), + Type: ValueTypeString, + }, + { + ID: FieldID("f"), + Type: ValueTypeNumber, + }, + }, + }, + }, + }, + { + Original: 
&i2id, + Parent: nil, + SchemaGroup: psgid2, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: FieldID("a"), + Value: ValueTypeString.ValueFrom("a"), + Type: ValueTypeString, + }, + { + ID: FieldID("b"), + Value: ValueTypeString.ValueFrom("b"), + Type: ValueTypeString, + }, + { + ID: FieldID("e"), + Links: NewLinks([]*Link{NewLink(d2, ds, df)}), + Type: ValueTypeString, + }, + { + ID: FieldID("f"), + Type: ValueTypeNumber, + }, + }, + }, + { + Original: &i3id, + Parent: nil, + SchemaGroup: psgid3, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: FieldID("a"), + Value: ValueTypeString.ValueFrom("a"), + Type: ValueTypeString, + }, + { + ID: FieldID("b"), + Value: ValueTypeString.ValueFrom("b"), + Type: ValueTypeString, + }, + { + ID: FieldID("e"), + Links: NewLinks([]*Link{NewLink(d2, ds, df)}), + Type: ValueTypeString, + }, + { + ID: FieldID("f"), + Type: ValueTypeNumber, + }, + }, + }, + }, + } + + // Merge(nil, pp2, &d) + expected3 := &Merged{ + Original: nil, + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + Original: nil, + Parent: &i4id, + SchemaGroup: psgid1, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + Original: nil, + Parent: &i8id, + SchemaGroup: psgid1, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: FieldID("a"), + Value: ValueTypeString.ValueFrom("1"), + Type: ValueTypeString, + }, + { + ID: FieldID("c"), + Value: ValueTypeString.ValueFrom("2"), + Type: ValueTypeString, + }, + { + ID: FieldID("d"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + { + ID: FieldID("f"), + Type: ValueTypeString, + }, + }, + }, + }, + }, + { + Original: nil, + Parent: &i5id, + SchemaGroup: psgid2, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: FieldID("a"), + Value: ValueTypeString.ValueFrom("1"), + Type: ValueTypeString, + }, + { + ID: FieldID("c"), + Value: ValueTypeString.ValueFrom("2"), + Type: ValueTypeString, + }, + { + ID: FieldID("d"), + Links: 
NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + { + ID: FieldID("f"), + Type: ValueTypeString, + }, + }, + }, + { + Original: nil, + Parent: &i6id, + SchemaGroup: psgid4, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: FieldID("a"), + Value: ValueTypeString.ValueFrom("1"), + Type: ValueTypeString, + }, + { + ID: FieldID("c"), + Value: ValueTypeString.ValueFrom("2"), + Type: ValueTypeString, + }, + { + ID: FieldID("d"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + { + ID: FieldID("f"), + Type: ValueTypeString, + }, + }, + }, + }, + } + + merged0 := Merge(nil, nil, nil) + assert.Nil(t, merged0) + merged1 := Merge(op, pp, nil) + assert.Nil(t, merged1) + merged2 := Merge(op, pp2, &d) + assert.Equal(t, expected1, merged2) + merged3 := Merge(op, nil, &d) + assert.Equal(t, expected2, merged3) + merged4 := Merge(nil, pp2, &d) + assert.Equal(t, expected3, merged4) +} diff --git a/server/pkg/property/migrator.go b/server/pkg/property/migrator.go new file mode 100644 index 000000000..b060d47ca --- /dev/null +++ b/server/pkg/property/migrator.go @@ -0,0 +1,15 @@ +package property + +type Migrator struct { + NewSchema *Schema + Plans []MigrationPlan +} + +type MigrationPlan struct { + From *Pointer + To *Pointer +} + +// func (m Migrator) Migrate(from *Property) *Property { + +// } diff --git a/server/pkg/property/pointer.go b/server/pkg/property/pointer.go new file mode 100644 index 000000000..f844bcc1a --- /dev/null +++ b/server/pkg/property/pointer.go @@ -0,0 +1,249 @@ +package property + +// Pointer is a pointer to a field and an item in properties and schemas +type Pointer struct { + schemaGroup *SchemaGroupID + item *ItemID + field *FieldID +} + +// NewPointer creates a new Pointer. 
func NewPointer(sg *SchemaGroupID, i *ItemID, f *FieldID) *Pointer {
	if sg == nil && i == nil && f == nil {
		return nil
	}
	return &Pointer{
		schemaGroup: sg.CloneRef(),
		item:        i.CopyRef(),
		field:       f.CloneRef(),
	}
}

// PointToEverything creates a new Pointer pointing to all items and fields.
func PointToEverything() *Pointer {
	return &Pointer{}
}

// PointField creates a new Pointer pointing to the field in properties.
// The schema group and the item may each be nil to leave that part unset.
func PointField(sg *SchemaGroupID, i *ItemID, f FieldID) *Pointer {
	return &Pointer{
		schemaGroup: sg.CloneRef(),
		item:        i.CopyRef(),
		field:       &f,
	}
}

// PointFieldOnly creates a new Pointer pointing only to the field, as used in property schemas.
func PointFieldOnly(fid FieldID) *Pointer {
	return &Pointer{
		field: &fid,
	}
}

// PointItemBySchema creates a new Pointer pointing to the schema item in property schemas.
func PointItemBySchema(sg SchemaGroupID) *Pointer {
	return &Pointer{
		schemaGroup: &sg,
	}
}

// PointItem creates a new Pointer pointing to the item in properties.
func PointItem(i ItemID) *Pointer {
	return &Pointer{
		item: &i,
	}
}

// PointFieldBySchemaGroup creates a new Pointer pointing to the field of the schema group in properties.
func PointFieldBySchemaGroup(sg SchemaGroupID, f FieldID) *Pointer {
	return &Pointer{
		schemaGroup: &sg,
		field:       &f,
	}
}

// PointFieldByItem creates a new Pointer pointing to the field of the item in properties.
+func PointFieldByItem(i ItemID, f FieldID) *Pointer { + return &Pointer{ + item: &i, + field: &f, + } +} + +func (p *Pointer) Clone() *Pointer { + if p == nil { + return nil + } + return &Pointer{ + field: p.field.CloneRef(), + item: p.item.CopyRef(), + schemaGroup: p.schemaGroup.CloneRef(), + } +} + +func (p *Pointer) ItemBySchemaGroupAndItem() (i SchemaGroupID, i2 ItemID, ok bool) { + if p == nil || p.schemaGroup == nil || p.item == nil { + ok = false + return + } + i = *p.schemaGroup + i2 = *p.item + ok = true + return +} + +func (p *Pointer) ItemBySchemaGroup() (i SchemaGroupID, ok bool) { + if p == nil || p.schemaGroup == nil { + ok = false + return + } + i = *p.schemaGroup + ok = true + return +} + +func (p *Pointer) SchemaGroupAndItem() (i SchemaGroupID, i2 ItemID, ok bool) { + ok = false + if p == nil { + return + } + if p.schemaGroup != nil { + i = *p.schemaGroup + ok = true + } + if p.item != nil { + i2 = *p.item + ok = true + } + return +} + +func (p *Pointer) Item() (i ItemID, ok bool) { + if p == nil || p.item == nil { + ok = false + return + } + i = *p.item + ok = true + return +} + +func (p *Pointer) ItemRef() *ItemID { + if p == nil { + return nil + } + return p.item.CopyRef() +} + +func (p *Pointer) FieldByItem() (i ItemID, f FieldID, ok bool) { + if p == nil || p.item == nil || p.schemaGroup != nil || p.field == nil { + ok = false + return + } + i = *p.item + f = *p.field + ok = true + return +} + +func (p *Pointer) FieldBySchemaGroup() (sg SchemaGroupID, f FieldID, ok bool) { + if p == nil || p.schemaGroup == nil || p.item != nil || p.field == nil { + ok = false + return + } + sg = *p.schemaGroup + f = *p.field + ok = true + return +} + +func (p *Pointer) Field() (f FieldID, ok bool) { + if p == nil || p.field == nil { + ok = false + return + } + f = *p.field + ok = true + return +} + +func (p *Pointer) FieldRef() *FieldID { + f, ok := p.Field() + if !ok { + return nil + } + return f.Ref() +} + +func (p *Pointer) FieldOnly() (f FieldID, ok bool) 
{ + if p == nil || p.field == nil || p.item != nil || p.schemaGroup != nil { + ok = false + return + } + f = *p.field + ok = true + return +} + +func (p *Pointer) FieldOnlyRef() *FieldID { + f, ok := p.FieldOnly() + if !ok { + return nil + } + return f.Ref() +} + +func (p *Pointer) FieldIfItemIs(sg SchemaGroupID, i ItemID) (f FieldID, ok bool) { + if p == nil || p.field == nil || !p.TestItem(sg, i) { + ok = false + return + } + f = *p.field + ok = true + return +} + +func (p *Pointer) FieldIfItemIsRef(sg SchemaGroupID, i ItemID) *FieldID { + f, ok := p.FieldIfItemIs(sg, i) + if !ok { + return nil + } + return f.Ref() +} + +func (p *Pointer) Test(sg SchemaGroupID, i ItemID, f FieldID) bool { + return p.TestItem(sg, i) && p.TestField(f) +} + +func (p *Pointer) TestItem(sg SchemaGroupID, i ItemID) bool { + return p.TestSchemaGroup(sg) && (p.item == nil || p.item.Equal(i)) +} + +func (p *Pointer) TestSchemaGroup(sg SchemaGroupID) bool { + return p != nil && (p.schemaGroup == nil || *p.schemaGroup == sg) +} + +func (p *Pointer) TestField(f FieldID) bool { + return p != nil && (p.field == nil || *p.field == f) +} + +func (p *Pointer) AllFields() *Pointer { + if p == nil || p.schemaGroup == nil && p.item == nil { + return nil + } + return &Pointer{ + schemaGroup: p.schemaGroup.CloneRef(), + item: p.item.CopyRef(), + field: nil, + } +} + +func (p *Pointer) GetAll() (sg *SchemaGroupID, i *ItemID, f *FieldID) { + if p == nil { + return + } + sg = p.schemaGroup.CloneRef() + i = p.item.CopyRef() + f = p.field.CloneRef() + return +} diff --git a/server/pkg/property/pointer_test.go b/server/pkg/property/pointer_test.go new file mode 100644 index 000000000..f0639c7c2 --- /dev/null +++ b/server/pkg/property/pointer_test.go @@ -0,0 +1,76 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestPointer(t *testing.T) { + iid := NewItemID() + sgid := SchemaGroupID("foo") + fid := FieldID("hoge") + + var p *Pointer + var ok bool + + p = 
PointItem(iid)
	i, ok := p.Item()
	assert.True(t, ok)
	assert.Equal(t, iid, i)
	_, ok = p.ItemBySchemaGroup()
	assert.False(t, ok)
	_, _, ok = p.FieldByItem()
	assert.False(t, ok)
	_, _, ok = p.FieldBySchemaGroup()
	assert.False(t, ok)

	// pointer to an item of a schema group only
	p = PointItemBySchema(sgid)
	_, ok = p.Item()
	assert.False(t, ok)
	sg, ok := p.ItemBySchemaGroup()
	assert.True(t, ok)
	assert.Equal(t, sgid, sg)
	_, _, ok = p.FieldByItem()
	assert.False(t, ok)
	_, _, ok = p.FieldBySchemaGroup()
	assert.False(t, ok)

	// pointer to a field through an item
	p = PointFieldByItem(iid, fid)
	i, ok = p.Item()
	assert.True(t, ok)
	assert.Equal(t, iid, i)
	_, ok = p.ItemBySchemaGroup()
	assert.False(t, ok)
	i, f, ok := p.FieldByItem()
	assert.True(t, ok)
	assert.Equal(t, iid, i)
	assert.Equal(t, fid, f)
	_, _, ok = p.FieldBySchemaGroup()
	assert.False(t, ok)

	// pointer to a field through a schema group
	p = PointFieldBySchemaGroup(sgid, fid)
	_, ok = p.Item()
	assert.False(t, ok)
	sg, ok = p.ItemBySchemaGroup()
	assert.True(t, ok)
	assert.Equal(t, sgid, sg)
	_, _, ok = p.FieldByItem()
	assert.False(t, ok)
	sg, f, ok = p.FieldBySchemaGroup()
	assert.True(t, ok)
	assert.Equal(t, sgid, sg)
	assert.Equal(t, fid, f)

	// fully specified pointer: FieldByItem/FieldBySchemaGroup are exclusive
	// and both fail when all three components are present
	p = PointField(&sgid, &iid, fid)
	i, ok = p.Item()
	assert.True(t, ok)
	assert.Equal(t, iid, i)
	sg, ok = p.ItemBySchemaGroup()
	assert.True(t, ok)
	assert.Equal(t, sgid, sg)
	_, _, ok = p.FieldByItem()
	assert.False(t, ok)
	_, _, ok = p.FieldBySchemaGroup()
	assert.False(t, ok)
}
diff --git a/server/pkg/property/property.go b/server/pkg/property/property.go
new file mode 100644
index 000000000..add864f59
--- /dev/null
+++ b/server/pkg/property/property.go
@@ -0,0 +1,643 @@
package property

import (
	"context"
	"errors"
	"fmt"

	"github.com/reearth/reearth-backend/pkg/dataset"
)

// Property is an instance of a property schema: a set of root items
// (groups and group lists) attached to a scene.
type Property struct {
	id     ID
	scene  SceneID
	schema SchemaID
	items  []Item
}

// ID returns the ID of the property.
func (p *Property) ID() ID {
	return p.id
}

// IDRef returns a reference to the property ID, or nil for a nil receiver.
func (p *Property) IDRef() *ID {
	if p == nil {
		return nil
	}
	return p.id.Ref()
}
+func (p *Property) Scene() SceneID { + return p.scene +} + +func (p *Property) Schema() SchemaID { + return p.schema +} + +func (p *Property) Field(ptr *Pointer) (*Field, *GroupList, *Group) { + if p == nil || ptr == nil { + return nil, nil, nil + } + + if g, gl := p.GroupAndList(ptr); g != nil { + if fields := g.Fields(ptr); len(fields) > 0 { + return fields[0], gl, g + } + } + + return nil, nil, nil +} + +func (p *Property) Items() []Item { + if p == nil { + return nil + } + return append([]Item{}, p.items...) +} + +func (p *Property) Item(ptr *Pointer) Item { + if p == nil || ptr == nil || ptr.FieldOnlyRef() != nil { + return nil + } + + for _, i := range p.items { + if ptr.TestItem(i.SchemaGroup(), i.ID()) { + return i + } + } + + return nil +} + +func (p *Property) GroupAndList(ptr *Pointer) (*Group, *GroupList) { + if p == nil || ptr == nil { + return nil, nil + } + + for _, i := range p.items { + if ptr.TestSchemaGroup(i.SchemaGroup()) { + if gl := ToGroupList(i); gl != nil { + return gl.GroupByPointer(ptr), gl + } else if g := ToGroup(i); g != nil { + return g, nil + } + } + } + + return nil, nil +} + +// ItemBySchema returns a root item by a schema group ID. 
+func (p *Property) ItemBySchema(id SchemaGroupID) Item { + if p == nil { + return nil + } + for _, f := range p.items { + if f.SchemaGroup() == id { + return f + } + } + return nil +} + +func (p *Property) GroupBySchema(id SchemaGroupID) *Group { + i := p.ItemBySchema(id) + if i == nil { + return nil + } + if g := ToGroup(i); g != nil { + return g + } + return nil +} + +func (p *Property) GroupListBySchema(id SchemaGroupID) *GroupList { + i := p.ItemBySchema(id) + if i == nil { + return nil + } + if g := ToGroupList(i); g != nil { + return g + } + return nil +} + +func (p *Property) ListItem(ptr *Pointer) (*Group, *GroupList) { + if p == nil { + return nil, nil + } + if sgid, i, ok := ptr.ItemBySchemaGroupAndItem(); ok { + if item := ToGroupList(p.ItemBySchema(sgid)); item != nil { + return item.Group(i), item + } + } else if iid, ok := ptr.Item(); ok { + for _, item := range p.items { + litem := ToGroupList(item) + if g := litem.Group(iid); g != nil { + return g, litem + } + } + } else if sgid, ok := ptr.ItemBySchemaGroup(); ok { + if item := ToGroupList(p.ItemBySchema(sgid)); item != nil { + return nil, item + } + } + return nil, nil +} + +func (p *Property) HasLinkedField() bool { + if p == nil { + return false + } + for _, f := range p.items { + if f.HasLinkedField() { + return true + } + } + return false +} + +func (p *Property) Clone() *Property { + if p == nil { + return nil + } + + items := make([]Item, 0, len(p.items)) + for _, i := range p.items { + items = append(items, i.CloneItem()) + } + + return &Property{ + id: p.id, + schema: p.schema, + scene: p.scene, + items: items, + } +} + +func (p *Property) Fields(ptr *Pointer) []*Field { + if p == nil || len(p.items) == 0 { + return nil + } + res := []*Field{} + for _, g := range p.items { + res = append(res, g.Fields(ptr)...) 
+ } + return res +} + +func (p *Property) RemoveFields(ptr *Pointer) (res bool) { + if p == nil { + return + } + for _, g := range p.items { + if g.RemoveFields(ptr) { + res = true + } + } + return +} + +func (p *Property) FieldsByLinkedDataset(s DatasetSchemaID, i DatasetID) []*Field { + if p == nil { + return nil + } + res := []*Field{} + for _, g := range p.items { + res = append(res, g.FieldsByLinkedDataset(s, i)...) + } + return res +} + +func (p *Property) IsDatasetLinked(s DatasetSchemaID, i DatasetID) bool { + if p == nil { + return false + } + for _, g := range p.items { + if g.IsDatasetLinked(s, i) { + return true + } + } + return false +} + +func (p *Property) Datasets() []DatasetID { + if p == nil { + return nil + } + res := []DatasetID{} + + for _, f := range p.items { + res = append(res, f.Datasets()...) + } + + return res +} + +func (p *Property) AddItem(i Item) bool { + if p == nil || p.ItemBySchema(i.SchemaGroup()) != nil || p.Item(PointItem(i.ID())) != nil { + return false + } + p.items = append(p.items, i) + return true +} + +func (p *Property) RemoveItem(ptr *Pointer) { + if p == nil || ptr == nil { + return + } + + for i := 0; i < len(p.items); i++ { + item := p.items[i] + if ptr.TestItem(item.SchemaGroup(), item.ID()) { + p.items = append(p.items[:i], p.items[i+1:]...) 
+ return + } + } +} + +func (p *Property) RemoveField(ptr *Pointer) { + if p == nil { + return + } + + fid, ok := ptr.Field() + if !ok { + return + } + + item := p.Item(ptr) + if group := ToGroup(item); group != nil { + group.RemoveField(fid) + } else if groupList := ToGroupList(item); groupList != nil { + groupList.RemoveFields(ptr) + } +} + +func (p *Property) Prune() (res bool) { + if p == nil { + return + } + for _, i := range p.items { + if i.Prune() { + res = true + } + if i.IsEmpty() { + p.RemoveItem(PointItem(i.ID())) + res = true + } + } + return +} + +func (p *Property) UpdateValue(ps *Schema, ptr *Pointer, v *Value) (*Field, *GroupList, *Group, error) { + field, gl, g, created := p.GetOrCreateField(ps, ptr) + if field == nil || created && v == nil { + // The field is empty and will be removed by prune, so it does not make sense + // p.Prune() + return nil, nil, nil, nil + } + + if err := field.Update(v, ps.Groups().Field(field.Field())); err != nil { + return nil, nil, nil, err + } + + if v == nil { + p.Prune() + if field.IsEmpty() { + field = nil + } + } + + return field, gl, g, nil +} + +func (p *Property) UnlinkAllByDataset(s DatasetSchemaID, ds DatasetID) { + fields := p.FieldsByLinkedDataset(s, ds) + for _, f := range fields { + f.Unlink() + } +} + +func (p *Property) GetOrCreateField(ps *Schema, ptr *Pointer) (*Field, *GroupList, *Group, bool) { + if p == nil || ps == nil || ptr == nil || !ps.ID().Equal(p.Schema()) { + return nil, nil, nil, false + } + + if field, pgl, pg := p.Field(ptr); field != nil { + return field, pgl, pg, false + } + + // if the field does not exist, create it here + + fid, ok := ptr.Field() + if !ok { + return nil, nil, nil, false + } + g, gl := p.GetOrCreateGroup(ps, ptr) + f2, ok := g.GetOrCreateField(ps, fid) + return f2, gl, g, ok +} + +func (p *Property) GetOrCreateItem(ps *Schema, ptr *Pointer) (Item, *GroupList) { + if p == nil || ps == nil || ptr == nil || !ps.ID().Equal(p.Schema()) { + return nil, nil + } + + if g, 
gl := p.GroupAndList(ptr); g != nil || gl != nil { + if g == nil { + return gl, nil + } + return g, gl + } + + psgid, ok := ptr.ItemBySchemaGroup() + if !ok { + return nil, nil + } + + psg := ps.Groups().Group(psgid) + if psg == nil { + return nil, nil + } + + ni := InitItemFrom(psg) + if ni != nil { + _ = p.AddItem(ni) + } + + return ni, nil // root item +} + +func (p *Property) GetOrCreateGroup(ps *Schema, ptr *Pointer) (*Group, *GroupList) { + if p == nil || ps == nil || ptr == nil || !ps.ID().Equal(p.Schema()) { + return nil, nil + } + + var psg *SchemaGroup + if psgid, ok := ptr.ItemBySchemaGroup(); ok { + psg = ps.Groups().Group(psgid) + } else if f, ok := ptr.Field(); ok { + psg = ps.Groups().GroupByField(f) + } + if psg == nil { + return nil, nil + } + + item, gl := p.GetOrCreateItem(ps, ptr) + return ToGroup(item), gl +} + +func (p *Property) GetOrCreateRootGroup(ptr *Pointer) (*Group, bool) { + if p == nil || ptr == nil { + return nil, false + } + + if i := p.Item(ptr); i != nil { + return ToGroup(i), false + } + + sg, ok := ptr.ItemBySchemaGroup() + if !ok { + return nil, false + } + + ng, err := NewGroup().NewID().SchemaGroup(sg).Build() + if err != nil { + return nil, false + } + + return ng, p.AddItem(ng) +} + +func (p *Property) GetOrCreateGroupList(ps *Schema, ptr *Pointer) *GroupList { + if p == nil || ps == nil || ptr == nil || !ps.ID().Equal(p.Schema()) { + return nil + } + + var psg *SchemaGroup + if psgid, ok := ptr.ItemBySchemaGroup(); ok { + psg = ps.Groups().Group(psgid) + } else if f, ok := ptr.Field(); ok { + psg = ps.Groups().GroupByField(f) + } + if psg == nil { + return nil + } + + item, _ := p.GetOrCreateItem(ps, ptr) + return ToGroupList(item) +} + +func (p *Property) AddListItem(ps *Schema, ptr *Pointer, index *int) (*Group, *GroupList) { + item, _ := p.GetOrCreateItem(ps, ptr) + pgl := ToGroupList(item) + if pgl == nil { + return nil, nil + } + return pgl.CreateAndAddListItem(ps, index), pgl +} + +func (p *Property) MoveListItem(ptr 
*Pointer, i int) (*Group, *GroupList) { + if ptr == nil { + return nil, nil + } + g, l := p.ListItem(ptr) + if g == nil || l == nil { + return nil, nil + } + l.Move(g.ID(), i) + return g, l +} + +func (p *Property) RemoveListItem(ptr *Pointer) bool { + if p == nil || ptr == nil { + return false + } + g, l := p.ListItem(ptr) + if g == nil || l == nil { + return false + } + ok := l.Remove(g.ID()) + if ok { + p.Prune() + } + return ok +} + +func (p *Property) UpdateLinkableValue(s *Schema, v *Value) { + if s == nil || p == nil || v == nil { + return + } + + sfid := s.linkable.FieldByType(v.Type()) + if sfid == nil { + return + } + + sf := s.Groups().GroupAndField(*sfid) + if sf == nil { + return + } + + f, _, _, ok := p.GetOrCreateField(s, sf.Pointer()) + if ok { + if err := f.Update(v, sf.Field); err != nil { + p.Prune() + } + } +} + +func (p *Property) AutoLinkField(s *Schema, v ValueType, d DatasetSchemaID, df *DatasetFieldID, ds *DatasetID) { + if s == nil || p == nil || df == nil { + return + } + + sfid := s.linkable.FieldByType(v) + if sfid == nil { + return + } + + sf := s.Groups().GroupAndField(*sfid) + if sf == nil { + return + } + + f, _, _, ok := p.GetOrCreateField(s, sf.Pointer()) + if ok { + if ds == nil { + f.Link(NewLinks([]*Link{NewLinkFieldOnly(d, *df)})) + } else { + f.Link(NewLinks([]*Link{NewLink(*ds, d, *df)})) + } + } +} + +// TODO: group migration +func (p *Property) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset.Loader) { + if p == nil || dl == nil { + return + } + p.schema = newSchema.ID() + + for _, f := range p.items { + f.MigrateSchema(ctx, newSchema, dl) + } + + p.Prune() +} + +func (p *Property) MigrateDataset(q DatasetMigrationParam) { + if p == nil { + return + } + for _, f := range p.items { + f.MigrateDataset(q) + } + p.Prune() +} + +func (p *Property) ValidateSchema(ps *Schema) error { + if p == nil { + return nil + } + if ps == nil { + return errors.New("invalid schema") + } + if p.schema != ps.ID() { + return 
errors.New("invalid schema id") + } + + for _, i := range p.items { + sg := i.SchemaGroup() + if err := i.ValidateSchema(ps.Groups().Group(sg)); err != nil { + return fmt.Errorf("%s (%s): %w", p.ID(), sg, err) + } + } + + return nil +} + +// MoveFields moves fields between items. Only fields in Groups can be moved to another Group, fields in GroupLists will simply be deleted. +func (p *Property) MoveFields(from, to *Pointer) (res bool) { + if p == nil { + return + } + + fields := p.GroupAndFields(from) + if len(fields) == 0 { + return + } + + toGroup, created := p.GetOrCreateRootGroup(to) + if created { + res = true + } + + for _, f := range fields { + if f.Group.RemoveField(f.Field.Field()) { + res = true + } + // For root group only + if f.ParentGroup == nil && toGroup != nil { + // NOTE: currently changing the field ID is not supported + toGroup.AddFields(f.Field) + res = true + } + } + + return +} + +func (p *Property) GroupAndFields(ptr *Pointer) []GroupAndField { + if p == nil || len(p.items) == 0 { + return nil + } + res := []GroupAndField{} + for _, i := range p.items { + if ptr == nil || ptr.TestSchemaGroup(i.SchemaGroup()) { + res = append(res, i.GroupAndFields(ptr)...) 
+ } + } + return res +} + +// Cast changes the type of fields that are matches the pointer +func (p *Property) Cast(ptr *Pointer, t ValueType) (res bool) { + for _, f := range p.Fields(ptr) { + if f.Cast(t) { + res = true + } + } + return +} + +func (p *Property) GuessSchema() *Schema { + if p == nil { + return nil + } + + groups := make([]*SchemaGroup, 0, len(p.items)) + for _, i := range p.items { + if g := i.GuessSchema(); g != nil { + groups = append(groups, g) + } + } + + if s, err := NewSchema().ID(p.Schema()).Groups(NewSchemaGroupList(groups)).Build(); err == nil { + return s + } + return nil +} + +func (p *Property) updateSchema(s SchemaID) bool { + if p == nil || s.IsNil() || p.schema.Equal(s) { + return false + } + p.schema = s.Clone() + return true +} + +func (p *Property) SetSchema(schema SchemaID) { + p.schema = schema.Clone() +} diff --git a/server/pkg/property/property_test.go b/server/pkg/property/property_test.go new file mode 100644 index 000000000..aaca5c8dd --- /dev/null +++ b/server/pkg/property/property_test.go @@ -0,0 +1,850 @@ +package property + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +var ( + testProperty1 = New().NewID().Schema(testSchema1.ID()).Scene(id.NewSceneID()).Items([]Item{testGroup1, testGroupList1}).MustBuild() +) + +func TestProperty_MigrateSchema(t *testing.T) { + sceneID := NewSceneID() + oldSchema := MustSchemaID("hoge~1.0.0/test") + newSchema := MustSchemaID("hoge~1.0.0/test2") + schemaField1ID := FieldID("a") + schemaField2ID := FieldID("b") + schemaField3ID := FieldID("c") + schemaField4ID := FieldID("d") + schemaField5ID := FieldID("e") + schemaField6ID := FieldID("f") + schemaField7ID := FieldID("g") + schemaField8ID := FieldID("h") + schemaGroupID := SchemaGroupID("i") + datasetID := NewDatasetID() + datasetSchemaID := 
NewDatasetSchemaID() + datasetFieldID := NewDatasetFieldID() + + schemaField1, _ := NewSchemaField().ID(schemaField1ID).Type(ValueTypeString).Build() + schemaField2, _ := NewSchemaField().ID(schemaField2ID).Type(ValueTypeNumber).Min(0).Max(100).Build() + schemaField3, _ := NewSchemaField().ID(schemaField3ID).Type(ValueTypeNumber).Min(0).Max(100).Build() + schemaField4, _ := NewSchemaField().ID(schemaField4ID).Type(ValueTypeString).Choices([]SchemaFieldChoice{ + {Title: i18n.StringFrom("x"), Key: "x"}, + {Title: i18n.StringFrom("y"), Key: "y"}, + }).Build() + schemaField5, _ := NewSchemaField().ID(schemaField5ID).Type(ValueTypeString).Build() + schemaField6, _ := NewSchemaField().ID(schemaField6ID).Type(ValueTypeNumber).Build() + schemaField7, _ := NewSchemaField().ID(schemaField7ID).Type(ValueTypeNumber).Build() + schemaFields := []*SchemaField{ + schemaField1, + schemaField2, + schemaField3, + schemaField4, + schemaField5, + schemaField6, + schemaField7, + } + schemaGroups := NewSchemaGroupList([]*SchemaGroup{ + NewSchemaGroup().ID(schemaGroupID).Fields(schemaFields).MustBuild(), + }) + + fields := []*Field{ + // should remain + NewField(schemaField1ID). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("foobar"))). + MustBuild(), + // should be removed because of max + NewField(schemaField2ID). + Value(OptionalValueFrom(ValueTypeNumber.ValueFrom(101))). + MustBuild(), + // should remain + NewField(schemaField3ID). + Value(OptionalValueFrom(ValueTypeNumber.ValueFrom(1))). + MustBuild(), + // should be removed because of choices + NewField(schemaField4ID). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("z"))). + MustBuild(), + // should remain + NewField(schemaField5ID). + Type(ValueTypeString). + Links(NewLinks([]*Link{ + NewLink(datasetID, datasetSchemaID, datasetFieldID), + })). + MustBuild(), + // should be removed because of linked dataset field value type + NewField(schemaField6ID). + Type(ValueTypeString). 
+ Links(NewLinks([]*Link{ + NewLink(datasetID, datasetSchemaID, datasetFieldID), + })). + MustBuild(), + // should be removed because of type + NewField(schemaField7ID). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("hogehoge"))). + MustBuild(), + // should be removed because of not existing field + NewField(schemaField8ID). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("hogehoge"))). + MustBuild(), + } + items := []Item{ + NewGroup().NewID().SchemaGroup(schemaGroupID).Fields(fields).MustBuild(), + } + + datasetFields := []*dataset.Field{ + dataset.NewField(datasetFieldID, dataset.ValueTypeString.ValueFrom("a"), ""), + } + + schema, _ := NewSchema().ID(newSchema).Groups(schemaGroups).Build() + property, _ := New().NewID().Scene(sceneID).Schema(oldSchema).Items(items).Build() + ds, _ := dataset.New().ID(datasetID).Schema(datasetSchemaID).Scene(sceneID).Fields(datasetFields).Build() + + property.MigrateSchema(context.Background(), schema, dataset.LoaderFrom([]*dataset.Dataset{ds})) + + newGroup := ToGroup(property.ItemBySchema(schemaGroupID)) + newFields := newGroup.Fields(nil) + + assert.Equal(t, schema.ID(), property.Schema()) + assert.Equal(t, 1, len(property.Items())) + assert.Equal(t, 3, len(newFields)) + assert.NotNil(t, newGroup.Field(schemaField1ID)) + assert.NotNil(t, newGroup.Field(schemaField3ID)) + assert.NotNil(t, newGroup.Field(schemaField5ID)) +} + +func TestGetOrCreateItem(t *testing.T) { + sceneID := NewSceneID() + sid, _ := SchemaIDFrom("hoge~1.0.0/test") + sf1id := FieldID("a") + sf2id := FieldID("b") + sg1id := SchemaGroupID("c") + sg2id := SchemaGroupID("d") + + sf1 := NewSchemaField().ID(sf1id).Type(ValueTypeString).MustBuild() + sg1 := NewSchemaGroup().ID(sg1id).Fields([]*SchemaField{sf1}).MustBuild() + sf2 := NewSchemaField().ID(sf2id).Type(ValueTypeString).MustBuild() + sg2 := NewSchemaGroup().ID(sg2id).Fields([]*SchemaField{sf2}).IsList(true).MustBuild() + s := NewSchema().ID(sid).Groups(NewSchemaGroupList([]*SchemaGroup{sg1, 
sg2})).MustBuild() + + p := New().NewID().Scene(sceneID).Schema(sid).MustBuild() + + // group + assert.Nil(t, p.ItemBySchema(sg1id)) + assert.Equal(t, []Item{}, p.Items()) + + i, gl := p.GetOrCreateItem(s, PointItemBySchema(sg1id)) + assert.Nil(t, gl) + assert.NotNil(t, i) + assert.Equal(t, sg1id, i.SchemaGroup()) + assert.Equal(t, i, ToGroup(p.ItemBySchema(sg1id))) + assert.Equal(t, []Item{i}, p.Items()) + + i2, gl := p.GetOrCreateItem(s, PointItemBySchema(sg1id)) + assert.Nil(t, gl) + assert.NotNil(t, i2) + assert.Equal(t, i, i2) + assert.Equal(t, i2, ToGroup(p.ItemBySchema(sg1id))) + assert.Equal(t, []Item{i2}, p.Items()) + + // group list + assert.Nil(t, p.ItemBySchema(sg2id)) + + i3, gl := p.GetOrCreateItem(s, PointItemBySchema(sg2id)) + assert.Nil(t, gl) + assert.NotNil(t, i3) + assert.Equal(t, sg2id, i3.SchemaGroup()) + assert.Equal(t, i3, ToGroupList(p.ItemBySchema(sg2id))) + assert.Equal(t, []Item{i, i3}, p.Items()) + + i4, gl := p.GetOrCreateItem(s, PointItemBySchema(sg2id)) + assert.Nil(t, gl) + assert.NotNil(t, i4) + assert.Equal(t, i3, i4) + assert.Equal(t, i4, ToGroupList(p.ItemBySchema(sg2id))) + assert.Equal(t, []Item{i2, i4}, p.Items()) +} + +func TestGetOrCreateField(t *testing.T) { + sceneID := NewSceneID() + sid, _ := SchemaIDFrom("hoge~1.0.0/test") + sf1id := FieldID("a") + sf2id := FieldID("b") + sg1id := SchemaGroupID("c") + sg2id := SchemaGroupID("d") + + sf1 := NewSchemaField().ID(sf1id).Type(ValueTypeString).MustBuild() + sg1 := NewSchemaGroup().ID(sg1id).Fields([]*SchemaField{sf1}).MustBuild() + sf2 := NewSchemaField().ID(sf2id).Type(ValueTypeString).MustBuild() + sg2 := NewSchemaGroup().ID(sg2id).Fields([]*SchemaField{sf2}).IsList(true).MustBuild() + s := NewSchema().ID(sid).Groups(NewSchemaGroupList([]*SchemaGroup{sg1, sg2})).MustBuild() + + p := New().NewID().Scene(sceneID).Schema(sid).MustBuild() + + // field and group will be created + assert.Nil(t, p.ItemBySchema(sg1id)) + assert.Equal(t, []Item{}, p.Items()) + + f, _, _, created := 
p.GetOrCreateField(s, PointFieldBySchemaGroup(sg1id, sf1id)) + assert.NotNil(t, f) + assert.True(t, created) + assert.Equal(t, sf1id, f.Field()) + i := ToGroup(p.ItemBySchema(sg1id)) + assert.Equal(t, sg1id, i.SchemaGroup()) + assert.Equal(t, []*Field{f}, i.Fields(nil)) + field, _, _ := p.Field(PointFieldBySchemaGroup(sg1id, sf1id)) + assert.Equal(t, f, field) + + f2, _, _, created := p.GetOrCreateField(s, PointFieldBySchemaGroup(sg1id, sf1id)) + assert.NotNil(t, f2) + assert.False(t, created) + assert.Equal(t, f, f2) + i2 := ToGroup(p.ItemBySchema(sg1id)) + assert.Equal(t, i, i2) + field, _, _ = p.Field(PointFieldBySchemaGroup(sg1id, sf1id)) + assert.Equal(t, f2, field) + + // field will not be created if field is incorrect + f3, _, _, _ := p.GetOrCreateField(s, PointFieldBySchemaGroup(sg1id, sf2id)) + assert.Nil(t, f3) + + // field and group list will not be created + assert.Nil(t, p.ItemBySchema(sg2id)) + f4, _, _, _ := p.GetOrCreateField(s, PointFieldBySchemaGroup(sg1id, sf2id)) + assert.Nil(t, f4) + assert.Nil(t, p.ItemBySchema(sg2id)) + assert.Equal(t, []Item{i}, p.Items()) +} + +func TestAddListItem(t *testing.T) { + sceneID := NewSceneID() + sid, _ := SchemaIDFrom("hoge~1.0.0/test") + sfid := FieldID("a") + sgid := SchemaGroupID("b") + sf := NewSchemaField().ID(sfid).Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID(sgid).Fields([]*SchemaField{sf}).IsList(true).MustBuild() + ps := NewSchema().ID(sid).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild() + p := New().NewID().Scene(sceneID).Schema(sid).MustBuild() + + item, _ := p.AddListItem(ps, PointItemBySchema(sgid), nil) + assert.Equal(t, sgid, item.SchemaGroup()) + _, list := p.ListItem(PointItemBySchema(sgid)) + assert.Equal(t, sgid, list.SchemaGroup()) + assert.Equal(t, []*Group{item}, list.Groups()) + + index := 0 + item2, _ := p.AddListItem(ps, PointItem(list.ID()), &index) + assert.Equal(t, sgid, item2.SchemaGroup()) + assert.Equal(t, []*Group{item2, item}, list.Groups()) +} + 
+func TestMoveListItem(t *testing.T) { + sceneID := NewSceneID() + sid, _ := SchemaIDFrom("hoge~1.0.0/test") + sgid := SchemaGroupID("b") + g1 := NewGroup().NewID().SchemaGroup(sgid).MustBuild() + g2 := NewGroup().NewID().SchemaGroup(sgid).MustBuild() + gl := NewGroupList().NewID().SchemaGroup(sgid).Groups([]*Group{g1, g2}).MustBuild() + p := New().NewID().Scene(sceneID).Schema(sid).Items([]Item{gl}).MustBuild() + + assert.Equal(t, []*Group{g1, g2}, gl.Groups()) + i, _ := p.MoveListItem(PointItem(g1.ID()), 1) + assert.Equal(t, g1, i) + assert.Equal(t, []*Group{g2, g1}, gl.Groups()) +} + +func TestRemoveListItem(t *testing.T) { + sceneID := NewSceneID() + sid, _ := SchemaIDFrom("hoge~1.0.0/test") + sgid := SchemaGroupID("b") + g1 := NewGroup().NewID().SchemaGroup(sgid).MustBuild() + g2 := NewGroup().NewID().SchemaGroup(sgid).MustBuild() + gl := NewGroupList().NewID().SchemaGroup(sgid).Groups([]*Group{g1, g2}).MustBuild() + p := New().NewID().Scene(sceneID).Schema(sid).Items([]Item{gl}).MustBuild() + + assert.Equal(t, []*Group{g1, g2}, gl.Groups()) + ok := p.RemoveListItem(PointItem(g1.ID())) + assert.True(t, ok) + assert.Equal(t, []*Group{g2}, gl.Groups()) + assert.Equal(t, 1, len(p.Items())) + + ok = p.RemoveListItem(NewPointer(sgid.Ref(), g2.IDRef(), nil)) + assert.True(t, ok) + assert.Equal(t, []*Group{}, gl.Groups()) + assert.Equal(t, 0, len(p.Items())) +} + +func TestPointer_Test(t *testing.T) { + itemID := NewItemID() + + type args struct { + sg SchemaGroupID + i ItemID + f FieldID + want bool + } + tests := []struct { + name string + target *Pointer + args []args + }{ + { + name: "schema group only", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref()}, + args: []args{ + {sg: SchemaGroupID("xx"), i: itemID, f: FieldID("a"), want: true}, + {sg: SchemaGroupID("xx"), i: itemID, f: FieldID("b"), want: true}, + {sg: SchemaGroupID("yy"), i: itemID, f: FieldID("a"), want: false}, + }, + }, + { + name: "item only", + target: &Pointer{item: itemID.Ref()}, + 
args: []args{ + {sg: SchemaGroupID("xx"), i: itemID, f: FieldID("a"), want: true}, + {sg: SchemaGroupID("yy"), i: itemID, f: FieldID("a"), want: true}, + {sg: SchemaGroupID("xx"), i: itemID, f: FieldID("b"), want: true}, + {sg: SchemaGroupID("xx"), i: NewItemID(), f: FieldID("a"), want: false}, + }, + }, + { + name: "schema group and item", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref(), item: itemID.Ref()}, + args: []args{ + {sg: SchemaGroupID("xx"), i: itemID, f: FieldID("a"), want: true}, + {sg: SchemaGroupID("xx"), i: itemID, f: FieldID("b"), want: true}, + {sg: SchemaGroupID("xx"), i: NewItemID(), f: FieldID("a"), want: false}, + {sg: SchemaGroupID("yy"), i: itemID, f: FieldID("a"), want: false}, + {sg: SchemaGroupID("yy"), i: NewItemID(), f: FieldID("a"), want: false}, + }, + }, + { + name: "all", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref(), item: itemID.Ref(), field: FieldID("a").Ref()}, + args: []args{ + {sg: SchemaGroupID("xx"), i: itemID, f: FieldID("a"), want: true}, + {sg: SchemaGroupID("yy"), i: itemID, f: FieldID("a"), want: false}, + {sg: SchemaGroupID("xx"), i: NewItemID(), f: FieldID("a"), want: false}, + {sg: SchemaGroupID("xx"), i: itemID, f: FieldID("b"), want: false}, + }, + }, + { + name: "empty", + target: &Pointer{}, + args: []args{ + {sg: SchemaGroupID("xx"), i: NewItemID(), f: FieldID("a"), want: true}, + {sg: SchemaGroupID("yy"), i: NewItemID(), f: FieldID("b"), want: true}, + {sg: SchemaGroupID("zz"), i: NewItemID(), f: FieldID("c"), want: true}, + }, + }, + { + name: "nil", + target: nil, + args: []args{ + {sg: SchemaGroupID("xx"), i: NewItemID(), f: FieldID("a"), want: false}, + {sg: SchemaGroupID("yy"), i: NewItemID(), f: FieldID("b"), want: false}, + {sg: SchemaGroupID("zz"), i: NewItemID(), f: FieldID("c"), want: false}, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + for i, a := range tt.args { + assert.Equal(t, a.want, tt.target.Test(a.sg, a.i, a.f), "test 
%d", i) + } + }) + } +} + +func TestPointer_TestItem(t *testing.T) { + iid := NewItemID() + + type args struct { + sg SchemaGroupID + i ItemID + } + tests := []struct { + name string + target *Pointer + args args + want bool + }{ + { + name: "true schema group only", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref()}, + args: args{sg: SchemaGroupID("xx"), i: iid}, + want: true, + }, + { + name: "true item only", + target: &Pointer{item: iid.Ref()}, + args: args{sg: SchemaGroupID("xx"), i: iid}, + want: true, + }, + { + name: "true schema group and item", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref(), item: iid.Ref()}, + args: args{sg: SchemaGroupID("xx"), i: iid}, + want: true, + }, + { + name: "true empty", + target: &Pointer{}, + args: args{sg: SchemaGroupID("xx"), i: iid}, + want: true, + }, + { + name: "false schema group only", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref()}, + args: args{sg: SchemaGroupID("yy"), i: iid}, + want: false, + }, + { + name: "false item only", + target: &Pointer{item: iid.Ref()}, + args: args{sg: SchemaGroupID("xx"), i: NewItemID()}, + want: false, + }, + { + name: "false schema group and item", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref(), item: iid.Ref()}, + args: args{sg: SchemaGroupID("xx"), i: NewItemID()}, + want: false, + }, + { + name: "false nil", + target: nil, + args: args{sg: SchemaGroupID("xx"), i: iid}, + want: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.TestItem(tt.args.sg, tt.args.i)) + }) + } +} + +func TestPointer_TestSchemaGroup(t *testing.T) { + type args struct { + sg SchemaGroupID + } + tests := []struct { + name string + target *Pointer + args args + want bool + }{ + { + name: "true", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref()}, + args: args{sg: SchemaGroupID("xx")}, + want: true, + }, + { + name: "false", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref()}, + args: 
args{sg: SchemaGroupID("yy")}, + want: false, + }, + { + name: "empty", + target: &Pointer{}, + args: args{sg: SchemaGroupID("xx")}, + want: true, + }, + { + name: "nil", + target: nil, + args: args{sg: SchemaGroupID("xx")}, + want: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.TestSchemaGroup(tt.args.sg)) + }) + } +} + +func TestPointer_TestField(t *testing.T) { + type args struct { + f FieldID + } + tests := []struct { + name string + target *Pointer + args args + want bool + }{ + { + name: "true", + target: &Pointer{field: FieldID("xx").Ref()}, + args: args{f: FieldID("xx")}, + want: true, + }, + { + name: "false", + target: &Pointer{field: FieldID("xx").Ref()}, + args: args{f: FieldID("yy")}, + want: false, + }, + { + name: "empty", + target: &Pointer{}, + args: args{f: FieldID("xx")}, + want: true, + }, + { + name: "nil", + target: nil, + args: args{f: FieldID("xx")}, + want: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.TestField(tt.args.f)) + }) + } +} + +func TestProperty_MoveFields(t *testing.T) { + itemID1 := NewItemID() + itemID2 := NewItemID() + + type args struct { + from *Pointer + to *Pointer + } + tests := []struct { + name string + target *Property + args args + wantRes bool + wantFieldsFrom []*Field + wantFieldsTo []*Field + }{ + { + name: "same group", + target: testProperty1.Clone(), + args: args{ + from: NewPointer(testGroup1.SchemaGroup().Ref(), nil, testField1.Field().Ref()), + to: NewPointer(testGroup1.SchemaGroup().Ref(), nil, FieldID("x").Ref()), + }, + wantRes: true, + wantFieldsFrom: []*Field{testField1}, // changing field ID is not supported + wantFieldsTo: []*Field{testField1}, + }, + { + name: "group -> group", + target: New().NewID().Schema(testSchema1.ID()).Scene(id.NewSceneID()).Items([]Item{ + 
NewGroup().NewID().SchemaGroup(testSchemaGroup1.ID()).Fields([]*Field{testField1}).MustBuild(), + NewGroup().NewID().SchemaGroup("x").Fields([]*Field{testField2}).MustBuild(), + }).MustBuild(), + args: args{ + from: NewPointer(testGroup1.SchemaGroup().Ref(), nil, testField1.Field().Ref()), + to: NewPointer(SchemaGroupID("x").Ref(), nil, testField1.Field().Ref()), + }, + wantRes: true, + wantFieldsFrom: []*Field{}, + wantFieldsTo: []*Field{testField2, testField1}, + }, + { + name: "group -> group (new)", + target: New().NewID().Schema(testSchema1.ID()).Scene(id.NewSceneID()).Items([]Item{ + NewGroup().NewID().SchemaGroup(testSchemaGroup1.ID()).Fields([]*Field{testField1}).MustBuild(), + }).MustBuild(), + args: args{ + from: NewPointer(testGroup1.SchemaGroup().Ref(), nil, testField1.Field().Ref()), + to: NewPointer(SchemaGroupID("x").Ref(), nil, testField1.Field().Ref()), + }, + wantRes: true, + wantFieldsFrom: []*Field{}, + wantFieldsTo: []*Field{testField1}, + }, + { + name: "group -> group (rename)", + target: New().NewID().Schema(testSchema1.ID()).Scene(id.NewSceneID()).Items([]Item{ + NewGroup().NewID().SchemaGroup(testSchemaGroup1.ID()).Fields([]*Field{testField1}).MustBuild(), + NewGroup().NewID().SchemaGroup("x").Fields([]*Field{}).MustBuild(), + }).MustBuild(), + args: args{ + from: NewPointer(testGroup1.SchemaGroup().Ref(), nil, testField1.Field().Ref()), + to: NewPointer(SchemaGroupID("x").Ref(), nil, FieldID("y").Ref()), + }, + wantRes: true, + wantFieldsFrom: []*Field{}, + wantFieldsTo: []*Field{testField1}, // changing field ID is not supported + }, + { + name: "group -> group (field nil)", + target: New().NewID().Schema(testSchema1.ID()).Scene(id.NewSceneID()).Items([]Item{ + NewGroup().NewID().SchemaGroup(testSchemaGroup1.ID()).Fields([]*Field{testField1}).MustBuild(), + NewGroup().NewID().SchemaGroup("x").Fields([]*Field{}).MustBuild(), + }).MustBuild(), + args: args{ + from: NewPointer(testGroup1.SchemaGroup().Ref(), nil, testField1.Field().Ref()), 
+ to: NewPointer(SchemaGroupID("x").Ref(), nil, nil), + }, + wantRes: true, + wantFieldsFrom: []*Field{}, + wantFieldsTo: []*Field{testField1}, + }, + { + name: "group -> list", + target: New().NewID().Schema(testSchema1.ID()).Scene(id.NewSceneID()).Items([]Item{ + NewGroup().NewID().SchemaGroup(testSchemaGroup1.ID()).Fields([]*Field{testField1}).MustBuild(), + NewGroupList().NewID().SchemaGroup(testSchemaGroup2.ID()).MustBuild(), + }).MustBuild(), + args: args{ + from: NewPointer(testGroup1.SchemaGroup().Ref(), nil, testField1.Field().Ref()), + to: NewPointer(testSchemaGroup2.ID().Ref(), nil, testField1.Field().Ref()), + }, + wantRes: true, + wantFieldsFrom: []*Field{}, // deleted + wantFieldsTo: []*Field{}, // not moved + }, + { + name: "list -> group", + target: New().NewID().Schema(testSchema1.ID()).Scene(id.NewSceneID()).Items([]Item{ + NewGroup().NewID().SchemaGroup(SchemaGroupID("x")).Fields([]*Field{testField1}).MustBuild(), + NewGroupList().NewID().SchemaGroup(SchemaGroupID("y")).Groups([]*Group{ + NewGroup().ID(itemID1).SchemaGroup(SchemaGroupID("y")).Fields([]*Field{testField2}).MustBuild(), + }).MustBuild(), + }).MustBuild(), + args: args{ + from: NewPointer(SchemaGroupID("y").Ref(), itemID1.Ref(), testField2.Field().Ref()), + to: NewPointer(SchemaGroupID("x").Ref(), nil, testField2.Field().Ref()), + }, + wantRes: true, + wantFieldsFrom: []*Field{}, // deleted + wantFieldsTo: []*Field{testField1}, // not moved + }, + { + name: "list -> list", + target: New().NewID().Schema(testSchema1.ID()).Scene(id.NewSceneID()).Items([]Item{ + NewGroupList().NewID().SchemaGroup(SchemaGroupID("x")).Groups([]*Group{ + NewGroup().ID(itemID1).SchemaGroup(SchemaGroupID("x")).Fields([]*Field{testField1}).MustBuild(), + }).MustBuild(), + NewGroupList().NewID().SchemaGroup(SchemaGroupID("y")).Groups([]*Group{ + NewGroup().ID(itemID2).SchemaGroup(SchemaGroupID("y")).Fields([]*Field{testField2}).MustBuild(), + }).MustBuild(), + }).MustBuild(), + args: args{ + from: 
NewPointer(SchemaGroupID("x").Ref(), itemID1.Ref(), testField1.Field().Ref()), + to: NewPointer(SchemaGroupID("y").Ref(), itemID2.Ref(), testField2.Field().Ref()), + }, + wantRes: true, + wantFieldsFrom: []*Field{}, // deleted + wantFieldsTo: []*Field{testField2}, // not moved + }, + { + name: "nil", + target: nil, + args: args{ + from: NewPointer(testGroup1.SchemaGroup().Ref(), nil, testField1.Field().Ref()), + to: NewPointer(testGroup1.SchemaGroup().Ref(), nil, FieldID("x").Ref()), + }, + wantRes: false, + wantFieldsFrom: nil, + wantFieldsTo: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.wantRes, tt.target.MoveFields(tt.args.from, tt.args.to)) + assert.Equal(t, tt.wantFieldsFrom, tt.target.Fields(tt.args.from.AllFields())) + assert.Equal(t, tt.wantFieldsTo, tt.target.Fields(tt.args.to.AllFields())) + }) + } +} + +func TestProperty_GroupAndList(t *testing.T) { + type args struct { + ptr *Pointer + } + + pgid1 := NewItemID() + pgid2 := NewItemID() + + tests := []struct { + name string + target *Property + args args + want *Group + want1 *GroupList + }{ + { + name: "found", + target: &Property{ + items: []Item{ + &GroupList{ + itemBase: itemBase{ + ID: pgid1, + SchemaGroup: SchemaGroupID("aaaa"), + }, + groups: []*Group{ + { + itemBase: itemBase{ + ID: pgid2, + SchemaGroup: SchemaGroupID("aaaa"), + }, + }, + }, + }, + }, + }, + args: args{ + ptr: &Pointer{ + schemaGroup: SchemaGroupID("aaaa").Ref(), + item: pgid2.Ref(), + field: nil, + }, + }, + want: &Group{ + itemBase: itemBase{ + ID: pgid2, + SchemaGroup: SchemaGroupID("aaaa"), + }, + }, + want1: &GroupList{ + itemBase: itemBase{ + ID: pgid1, + SchemaGroup: SchemaGroupID("aaaa"), + }, + groups: []*Group{ + { + itemBase: itemBase{ + ID: pgid2, + SchemaGroup: SchemaGroupID("aaaa"), + }, + }, + }, + }, + }, + { + name: "list only", + target: &Property{ + items: []Item{ + &GroupList{ + itemBase: itemBase{ + ID: pgid1, + SchemaGroup: 
SchemaGroupID("aaaa"), + }, + groups: []*Group{ + { + itemBase: itemBase{ + ID: pgid2, + SchemaGroup: SchemaGroupID("aaaa"), + }, + }, + }, + }, + }, + }, + args: args{ + ptr: &Pointer{ + schemaGroup: SchemaGroupID("aaaa").Ref(), + item: pgid1.Ref(), + field: nil, + }, + }, + want: nil, + want1: &GroupList{ + itemBase: itemBase{ + ID: pgid1, + SchemaGroup: SchemaGroupID("aaaa"), + }, + groups: []*Group{ + { + itemBase: itemBase{ + ID: pgid2, + SchemaGroup: SchemaGroupID("aaaa"), + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + got, got1 := tt.target.GroupAndList(tt.args.ptr) + assert.Equal(t, tt.want, got) + assert.Equal(t, tt.want1, got1) + }) + } +} + +func TestProperty_AddItem(t *testing.T) { + type args struct { + i Item + } + + iid := NewItemID() + + tests := []struct { + name string + target *Property + args args + want bool + wantItems []Item + }{ + { + name: "ok", + target: &Property{}, + args: args{i: &Group{}}, + want: true, + wantItems: []Item{&Group{}}, + }, + { + name: "schema group duplicated", + target: &Property{items: []Item{&Group{itemBase: itemBase{SchemaGroup: "a"}}}}, + args: args{i: &Group{itemBase: itemBase{SchemaGroup: "a"}}}, + want: false, + wantItems: []Item{&Group{itemBase: itemBase{SchemaGroup: "a"}}}, + }, + { + name: "id duplicated", + target: &Property{items: []Item{&Group{itemBase: itemBase{ID: iid}}}}, + args: args{i: &Group{itemBase: itemBase{ID: iid}}}, + want: false, + wantItems: []Item{&Group{itemBase: itemBase{ID: iid}}}, + }, + { + name: "nil", + target: nil, + args: args{i: &Group{}}, + want: false, + wantItems: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.AddItem(tt.args.i)) + if tt.target != nil { + assert.Equal(t, tt.wantItems, tt.target.items) + } + }) + } +} diff --git a/server/pkg/property/schema.go b/server/pkg/property/schema.go new file mode 
100644 index 000000000..5b23a417c --- /dev/null +++ b/server/pkg/property/schema.go @@ -0,0 +1,88 @@ +package property + +type Schema struct { + id SchemaID + version int + groups *SchemaGroupList + linkable LinkableFields +} + +type LinkableFields struct { + LatLng *SchemaFieldPointer + URL *SchemaFieldPointer +} + +func (p *Schema) ID() SchemaID { + return p.id +} + +func (p *Schema) IDRef() *SchemaID { + if p == nil { + return nil + } + return p.id.Ref() +} + +func (p *Schema) Scene() *SceneID { + return p.id.Plugin().Scene() +} + +func (p *Schema) Version() int { + return p.version +} + +func (p *Schema) Groups() *SchemaGroupList { + if p == nil { + return nil + } + return p.groups +} + +func (p *Schema) LinkableFields() LinkableFields { + if p == nil { + return LinkableFields{} + } + return p.linkable.Clone() +} + +func (p LinkableFields) Clone() LinkableFields { + return LinkableFields{ + LatLng: p.LatLng.Clone(), + URL: p.URL.Clone(), + } +} + +func (l LinkableFields) Validate(s *Schema) bool { + if s == nil { + return false + } + if l.LatLng != nil { + if f := s.Groups().Field(l.LatLng.Field); f == nil { + return false + } + } + if l.URL != nil { + if f := s.Groups().Field(l.URL.Field); f == nil { + return false + } + } + return true +} + +func (l LinkableFields) PointerByType(ty ValueType) *SchemaFieldPointer { + switch ty { + case ValueTypeLatLng: + return l.LatLng + case ValueTypeURL: + return l.URL + } + return nil +} + +func (l LinkableFields) FieldByType(ty ValueType) *FieldID { + p := l.PointerByType(ty) + if p == nil { + return nil + } + return p.Field.Ref() +} diff --git a/server/pkg/property/schema_builder.go b/server/pkg/property/schema_builder.go new file mode 100644 index 000000000..d0873f914 --- /dev/null +++ b/server/pkg/property/schema_builder.go @@ -0,0 +1,59 @@ +package property + +import ( + "errors" +) + +var ( + ErrInvalidSceneID = errors.New("invalid scene id") + ErrInvalidPropertySchemaID = errors.New("invalid property schema id") + 
ErrInvalidValue = errors.New("invalid value") + ErrInvalidPropertyLinkableField = errors.New("invalid property linkable field") + ErrInvalidVersion = errors.New("invalid version") +) + +type SchemaBuilder struct { + p *Schema +} + +func NewSchema() *SchemaBuilder { + return &SchemaBuilder{p: &Schema{}} +} + +func (b *SchemaBuilder) Build() (*Schema, error) { + if b.p.id.IsNil() { + return nil, ErrInvalidID + } + if !b.p.linkable.Validate(b.p) { + return nil, ErrInvalidPropertyLinkableField + } + return b.p, nil +} + +func (b *SchemaBuilder) MustBuild() *Schema { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +func (b *SchemaBuilder) ID(id SchemaID) *SchemaBuilder { + b.p.id = id + return b +} + +func (b *SchemaBuilder) Version(version int) *SchemaBuilder { + b.p.version = version + return b +} + +func (b *SchemaBuilder) Groups(groups *SchemaGroupList) *SchemaBuilder { + b.p.groups = groups + return b +} + +func (b *SchemaBuilder) LinkableFields(l LinkableFields) *SchemaBuilder { + b.p.linkable = l + return b +} diff --git a/server/pkg/property/schema_builder_test.go b/server/pkg/property/schema_builder_test.go new file mode 100644 index 000000000..447192ca7 --- /dev/null +++ b/server/pkg/property/schema_builder_test.go @@ -0,0 +1,139 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSchemaBuilder_Build(t *testing.T) { + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID("aaa").Fields([]*SchemaField{sf}).MustBuild() + + type args struct { + ID SchemaID + Version int + Groups *SchemaGroupList + Linkable LinkableFields + } + + tests := []struct { + Name string + Args args + Expected *Schema + Err error + }{ + { + Name: "fail: invalid id", + Err: ErrInvalidID, + }, + { + Name: "fail: invalid linkable field", + Args: args{ + ID: MustSchemaID("xx~1.0.0/aa"), + Linkable: LinkableFields{LatLng: &SchemaFieldPointer{Field: FieldID("xx")}}, + }, + Err: 
ErrInvalidPropertyLinkableField, + }, + { + Name: "success", + Args: args{ + ID: MustSchemaID("xx~1.0.0/aa"), + Groups: NewSchemaGroupList([]*SchemaGroup{sg}), + Version: 1, + }, + Expected: &Schema{ + id: MustSchemaID("xx~1.0.0/aa"), + version: 1, + groups: NewSchemaGroupList([]*SchemaGroup{sg}), + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := NewSchema(). + ID(tt.Args.ID). + Groups(tt.Args.Groups). + Version(tt.Args.Version). + LinkableFields(tt.Args.Linkable). + Build() + + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) + } else { + assert.Equal(t, tt.Err, err) + } + }) + } +} + +func TestSchemaBuilder_MustBuild(t *testing.T) { + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID("aaa").Fields([]*SchemaField{sf}).MustBuild() + + type args struct { + ID SchemaID + Version int + Groups *SchemaGroupList + Linkable LinkableFields + } + + tests := []struct { + Name string + Args args + Expected *Schema + Err string + }{ + { + Name: "fail: invalid id", + Err: ErrInvalidID.Error(), + }, + { + Name: "fail: invalid linkable field", + Args: args{ + ID: MustSchemaID("xx~1.0.0/aa"), + Linkable: LinkableFields{LatLng: &SchemaFieldPointer{Field: FieldID("xx")}}, + }, + Err: ErrInvalidPropertyLinkableField.Error(), + }, + { + Name: "success", + Args: args{ + ID: MustSchemaID("xx~1.0.0/aa"), + Groups: NewSchemaGroupList([]*SchemaGroup{sg}), + Version: 1, + }, + Expected: &Schema{ + id: MustSchemaID("xx~1.0.0/aa"), + version: 1, + groups: NewSchemaGroupList([]*SchemaGroup{sg}), + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + build := func() *Schema { + t.Helper() + return NewSchema(). + ID(tc.Args.ID). + Groups(tc.Args.Groups). + Version(tc.Args.Version). + LinkableFields(tc.Args.Linkable). 
+ MustBuild() + } + + if tc.Err != "" { + assert.PanicsWithError(t, tc.Err, func() { _ = build() }) + } else { + assert.Equal(t, tc.Expected, build()) + } + }) + } +} diff --git a/server/pkg/property/schema_field.go b/server/pkg/property/schema_field.go new file mode 100644 index 000000000..811432c28 --- /dev/null +++ b/server/pkg/property/schema_field.go @@ -0,0 +1,197 @@ +package property + +import ( + "github.com/reearth/reearth-backend/pkg/i18n" +) + +type SchemaField struct { + id FieldID + propertyType ValueType + title i18n.String + description i18n.String + prefix string + suffix string + defaultValue *Value + ui SchemaFieldUI + min *float64 + max *float64 + choices []SchemaFieldChoice + cond *Condition +} + +type SchemaFieldChoice struct { + Key string + Title i18n.String + Icon string +} + +func (p *SchemaField) ID() FieldID { + if p == nil { + return "" + } + return p.id +} + +func (p *SchemaField) Type() ValueType { + if p == nil { + return ValueTypeUnknown + } + return p.propertyType +} + +func (p *SchemaField) Title() i18n.String { + if p == nil { + return nil + } + return p.title.Clone() +} + +func (p *SchemaField) Description() i18n.String { + if p == nil { + return nil + } + return p.description.Clone() +} + +func (p *SchemaField) Prefix() string { + if p == nil { + return "" + } + return p.prefix +} + +func (p *SchemaField) Suffix() string { + if p == nil { + return "" + } + return p.suffix +} + +func (p *SchemaField) DefaultValue() *Value { + if p == nil { + return nil + } + return p.defaultValue.Clone() +} + +func (p *SchemaField) UI() *SchemaFieldUI { + if p == nil || p.ui == "" { + return nil + } + ui := p.ui + return &ui +} + +func (p *SchemaField) Min() *float64 { + if p == nil || p.min == nil { + return nil + } + min := *p.min + return &min +} + +func (p *SchemaField) Max() *float64 { + if p == nil || p.max == nil { + return nil + } + max := *p.max + return &max +} + +func (p *SchemaField) MinMax() (*float64, *float64) { + if p == nil { + 
return nil, nil + } + return p.Min(), p.Max() +} + +func (p *SchemaField) Choices() []SchemaFieldChoice { + if p == nil { + return nil + } + if p.choices == nil { + return p.choices + } + return append([]SchemaFieldChoice{}, p.choices...) +} + +func (p *SchemaField) Choice(key string) *SchemaFieldChoice { + if p == nil || p.choices == nil { + return nil + } + for _, c := range p.choices { + if c.Key == key { + return &c + } + } + return nil +} + +func (p *SchemaField) IsAvailableIf() *Condition { + if p == nil { + return nil + } + return p.cond.Clone() +} + +func (p *SchemaField) Validate(value *OptionalValue) bool { + if p == nil || value == nil || p.propertyType != value.Type() { + return false + } + switch v := value.Value().Value().(type) { + case float64: + if min := p.Min(); min != nil { + if v < *min { + return false + } + } + if max := p.Max(); max != nil { + if v > *max { + return false + } + } + case string: + if choices := p.Choices(); choices != nil { + ok := false + for _, k := range choices { + if k.Key == v { + ok = true + break + } + } + if !ok { + return false + } + } + } + return true +} + +func (p *SchemaField) SetTitle(title i18n.String) { + if p == nil { + return + } + p.title = title.Clone() +} + +func (p *SchemaField) SetDescription(des i18n.String) { + if p == nil { + return + } + p.description = des.Clone() +} + +func (c *SchemaFieldChoice) SetTitle(l i18n.String) { + if c == nil { + return + } + c.Title = l.Clone() +} + +func (c SchemaFieldChoice) Copy() SchemaFieldChoice { + return SchemaFieldChoice{ + Icon: c.Icon, + Key: c.Key, + Title: c.Title.Clone(), + } +} diff --git a/server/pkg/property/schema_field_builder.go b/server/pkg/property/schema_field_builder.go new file mode 100644 index 000000000..c38d6de2f --- /dev/null +++ b/server/pkg/property/schema_field_builder.go @@ -0,0 +1,141 @@ +package property + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/i18n" +) + +type SchemaFieldBuilder struct { + p *SchemaField +} 
+ +func NewSchemaField() *SchemaFieldBuilder { + return &SchemaFieldBuilder{p: &SchemaField{}} +} + +func (b *SchemaFieldBuilder) Build() (*SchemaField, error) { + if b.p.id.String() == "" || b.p.id.String() == "id" { + return nil, ErrInvalidID + } + if b.p.ui != SchemaFieldUI("") && SchemaFieldUIFrom(string(b.p.ui)) == SchemaFieldUI("") { + return nil, errors.New("invalid property schema field ui") + } + if b.p.min != nil && b.p.max != nil && *b.p.min > *b.p.max { + return nil, errors.New("invalid min and max") + } + if ok := b.p.propertyType.Valid(); !ok { + return nil, errors.New("invalid value type") + } + return b.p, nil +} + +func (b *SchemaFieldBuilder) MustBuild() *SchemaField { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +func (b *SchemaFieldBuilder) ID(id FieldID) *SchemaFieldBuilder { + b.p.id = id + return b +} + +func (b *SchemaFieldBuilder) Type(propertyType ValueType) *SchemaFieldBuilder { + b.p.propertyType = propertyType + return b +} + +func (b *SchemaFieldBuilder) Name(name i18n.String) *SchemaFieldBuilder { + b.p.title = name.Clone() + return b +} + +func (b *SchemaFieldBuilder) Description(description i18n.String) *SchemaFieldBuilder { + b.p.description = description.Clone() + return b +} + +func (b *SchemaFieldBuilder) Prefix(prefix string) *SchemaFieldBuilder { + b.p.prefix = prefix + return b +} + +func (b *SchemaFieldBuilder) Suffix(suffix string) *SchemaFieldBuilder { + b.p.suffix = suffix + return b +} + +func (b *SchemaFieldBuilder) DefaultValue(v *Value) *SchemaFieldBuilder { + if v == nil { + b.p.defaultValue = nil + } else { + b.p.defaultValue = v.Clone() + } + return b +} + +func (b *SchemaFieldBuilder) UI(ui SchemaFieldUI) *SchemaFieldBuilder { + b.p.ui = ui + return b +} + +func (b *SchemaFieldBuilder) UIRef(ui *SchemaFieldUI) *SchemaFieldBuilder { + if ui == nil { + b.p.ui = SchemaFieldUI("") + } else { + b.p.ui = *ui + } + return b +} + +func (b *SchemaFieldBuilder) Min(min float64) 
*SchemaFieldBuilder { + m := min + b.p.min = &m + return b +} + +func (b *SchemaFieldBuilder) Max(max float64) *SchemaFieldBuilder { + m := max + b.p.max = &m + return b +} + +func (b *SchemaFieldBuilder) MinRef(min *float64) *SchemaFieldBuilder { + if min == nil { + b.p.min = nil + } else { + m := *min + b.p.min = &m + } + return b +} + +func (b *SchemaFieldBuilder) MaxRef(max *float64) *SchemaFieldBuilder { + if max == nil { + b.p.max = nil + } else { + m := *max + b.p.max = &m + } + return b +} + +func (b *SchemaFieldBuilder) Choices(choices []SchemaFieldChoice) *SchemaFieldBuilder { + if choices == nil { + b.p.choices = nil + } else { + b.p.choices = make([]SchemaFieldChoice, 0, len(choices)) + for _, c := range choices { + b.p.choices = append(b.p.choices, c.Copy()) + } + } + return b +} + +func (b *SchemaFieldBuilder) IsAvailableIf(cond *Condition) *SchemaFieldBuilder { + b.p.cond = cond.Clone() + return b +} diff --git a/server/pkg/property/schema_field_builder_test.go b/server/pkg/property/schema_field_builder_test.go new file mode 100644 index 000000000..6c7a33ef5 --- /dev/null +++ b/server/pkg/property/schema_field_builder_test.go @@ -0,0 +1,96 @@ +package property + +import ( + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/stretchr/testify/assert" +) + +func TestSchemaFieldBuilder_Build(t *testing.T) { + tests := []struct { + Name string + Id FieldID + PropertyType ValueType + Fname i18n.String + Description i18n.String + Prefix string + Suffix string + DefaultValue *Value + Ui SchemaFieldUI + Min float64 + MinRef *float64 + Max float64 + MaxRef *float64 + Choices []SchemaFieldChoice + Cond *Condition + Expected struct { + Id FieldID + PropertyType ValueType + Fname i18n.String + Description i18n.String + Prefix string + Suffix string + DefaultValue *Value + Ui SchemaFieldUI + Min *float64 + Max *float64 + Choices []SchemaFieldChoice + Cond *Condition + } + Err error + }{ + { + Name: "nil field", + Err: 
ErrInvalidID, + }, + { + Name: "fail min > max", + Id: FieldID("aa"), + Min: 10, + Max: 1, + Err: errors.New("invalid min and max"), + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := NewSchemaField(). + ID(tt.Id).Name(tt.Fname). + IsAvailableIf(tt.Cond). + Type(tt.PropertyType). + Description(tt.Description). + Choices(tt.Choices). + Prefix(tt.Prefix). + Suffix(tt.Suffix). + DefaultValue(tt.DefaultValue). + MaxRef(tt.MaxRef). + MinRef(tt.MinRef). + Min(tt.Min). + Max(tt.Max). + UI(tt.Ui). + UIRef(&tt.Ui). + Build() + + if tt.Err == nil { + assert.Equal(t, tt.Expected.Ui, res.UI()) + assert.Equal(t, tt.Expected.Id, res.ID()) + assert.Equal(t, tt.Expected.Min, res.Min()) + assert.Equal(t, tt.Expected.Max, res.Max()) + assert.Equal(t, tt.Expected.DefaultValue, res.DefaultValue()) + assert.Equal(t, tt.Expected.Description, res.Description()) + assert.Equal(t, tt.Expected.Prefix, res.Prefix()) + assert.Equal(t, tt.Expected.Suffix, res.Suffix()) + assert.Equal(t, tt.Expected.Choices, res.Choices()) + assert.Equal(t, tt.Expected.Cond, res.IsAvailableIf()) + assert.Equal(t, tt.Expected.Fname, res.Title()) + assert.Equal(t, tt.Expected.PropertyType, res.Type()) + } else { + assert.Equal(t, tt.Err, err) + } + }) + } +} diff --git a/server/pkg/property/schema_field_test.go b/server/pkg/property/schema_field_test.go new file mode 100644 index 000000000..8cf0ec5ea --- /dev/null +++ b/server/pkg/property/schema_field_test.go @@ -0,0 +1,250 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/stretchr/testify/assert" +) + +var ( + testSchemaField1 = NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + testSchemaField2 = NewSchemaField().ID("b").Type(ValueTypeNumber).MustBuild() + testSchemaField3 = NewSchemaField().ID("c").Type(ValueTypeLatLng).MustBuild() +) + +func TestSchemaField_MinMax(t *testing.T) { + getFloatRef := func(f float64) *float64 { + 
return &f + } + + tests := []struct { + Name string + SF *SchemaField + Expected struct { + Min, Max *float64 + } + }{ + { + Name: "get minmax", + SF: NewSchemaField().ID("A").Type(ValueTypeNumber).Min(10.0).Max(20.0).MustBuild(), + Expected: struct { + Min, Max *float64 + }{ + Min: getFloatRef(10.0), + Max: getFloatRef(20.0), + }, + }, + { + Name: "nil sf", + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + min, max := tc.SF.MinMax() + assert.Equal(t, tc.Expected.Min, min) + assert.Equal(t, tc.Expected.Max, max) + }) + } +} + +func TestSchemaField_Choice(t *testing.T) { + tests := []struct { + Name, Key string + SF *SchemaField + Expected *SchemaFieldChoice + }{ + { + Name: "found", + Key: "xxx", + SF: NewSchemaField().ID("A").Type(ValueTypeNumber).Choices([]SchemaFieldChoice{ + { + Key: "xxx", + Title: i18n.StringFrom("lll"), + Icon: "", + }, + { + Key: "zzz", + Title: i18n.StringFrom("abc"), + Icon: "", + }, + }).MustBuild(), + Expected: &SchemaFieldChoice{ + Key: "xxx", + Title: i18n.StringFrom("lll"), + Icon: "", + }, + }, + { + Name: "not found", + Key: "aaa", + SF: NewSchemaField().ID("A").Type(ValueTypeNumber).Choices([]SchemaFieldChoice{ + { + Key: "xxx", + Title: i18n.StringFrom("lll"), + Icon: "", + }, + { + Key: "zzz", + Title: i18n.StringFrom("abc"), + Icon: "", + }, + }).MustBuild(), + Expected: nil, + }, + { + Name: "nil sf", + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + ch := tc.SF.Choice(tc.Key) + assert.Equal(t, tc.Expected, ch) + }) + } +} + +func TestSchemaField_SetDescription(t *testing.T) { + sf := NewSchemaField().ID("A").Type(ValueTypeNumber).Description(i18n.StringFrom("xx")).MustBuild() + sf.SetDescription(i18n.StringFrom("aa")) + assert.Equal(t, i18n.StringFrom("aa"), sf.Description()) +} + +func TestSchemaField_SetTitle(t *testing.T) { + sf := 
NewSchemaField().ID("A").Type(ValueTypeNumber).Name(i18n.StringFrom("abc")).MustBuild() + sf.SetTitle(i18n.StringFrom("bb")) + assert.Equal(t, i18n.StringFrom("bb"), sf.Title()) +} + +func TestSchemaField_Validate(t *testing.T) { + tests := []struct { + Name string + SF *SchemaField + Input *OptionalValue + Expected bool + }{ + { + Name: "nil sf", + }, + { + Name: "nil optional value", + SF: NewSchemaField().ID("A").Type(ValueTypeNumber).MustBuild(), + Input: nil, + Expected: false, + }, + { + Name: "nil value", + SF: NewSchemaField().ID("A").Type(ValueTypeNumber).MustBuild(), + Input: NewOptionalValue(ValueTypeNumber, nil), + Expected: true, + }, + { + Name: "property type != value type", + SF: NewSchemaField().ID("A").Type(ValueTypeNumber).MustBuild(), + Input: OptionalValueFrom(ValueTypeBool.ValueFrom(true)), + Expected: false, + }, + { + Name: "property type != value type with nil value", + SF: NewSchemaField().ID("A").Type(ValueTypeNumber).MustBuild(), + Input: NewOptionalValue(ValueTypeBool, nil), + Expected: false, + }, + { + Name: "validate min", + SF: NewSchemaField().ID("A").Type(ValueTypeNumber).Min(10).MustBuild(), + Input: OptionalValueFrom(ValueTypeNumber.ValueFrom(9)), + Expected: false, + }, + { + Name: "validate max", + SF: NewSchemaField().ID("A").Type(ValueTypeNumber).Max(10).MustBuild(), + Input: OptionalValueFrom(ValueTypeNumber.ValueFrom(11)), + Expected: false, + }, + { + Name: "valid string", + SF: NewSchemaField().ID("a").Type(ValueTypeString).Choices([]SchemaFieldChoice{ + { + Key: "xxx", + Title: i18n.StringFrom("lll"), + Icon: "", + }, + { + Key: "zzz", + Title: i18n.StringFrom("abc"), + Icon: "", + }, + }).MustBuild(), + Input: OptionalValueFrom(ValueTypeString.ValueFrom("xxx")), + Expected: true, + }, + { + Name: "invalid string", + SF: NewSchemaField().ID("a").Type(ValueTypeString).Choices([]SchemaFieldChoice{ + { + Key: "xxx", + Title: i18n.StringFrom("lll"), + Icon: "", + }, + { + Key: "zzz", + Title: i18n.StringFrom("abc"), + Icon: 
"", + }, + }).MustBuild(), + Input: OptionalValueFrom(ValueTypeString.ValueFrom("aaa")), + Expected: false, + }, + { + Name: "validate other", + SF: NewSchemaField().ID("A").Type(ValueTypeLatLng).MustBuild(), + Input: OptionalValueFrom(ValueTypeLatLng.ValueFrom(LatLng{Lat: 10, Lng: 11})), + Expected: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.SF.Validate(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestSchemaFieldChoice_SetLabel(t *testing.T) { + sfc := &SchemaFieldChoice{ + Key: "xxx", + Title: i18n.StringFrom("lll"), + Icon: "", + } + sfc.SetTitle(i18n.StringFrom("aa")) + assert.Equal(t, i18n.StringFrom("aa"), sfc.Title) +} + +func TestSchemaFieldChoice_Copy(t *testing.T) { + sfc := SchemaFieldChoice{ + Key: "xxx", + Title: i18n.StringFrom("lll"), + Icon: "", + } + copy := sfc.Copy() + assert.Equal(t, sfc, copy) +} + +func TestSchemaField_Nil(t *testing.T) { + var sf *SchemaField + assert.Nil(t, sf.UI()) + assert.Nil(t, sf.DefaultValue()) + assert.Nil(t, sf.IsAvailableIf()) + assert.Nil(t, sf.Max()) + assert.Nil(t, sf.Min()) +} diff --git a/server/pkg/property/schema_field_ui.go b/server/pkg/property/schema_field_ui.go new file mode 100644 index 000000000..1bdddcb40 --- /dev/null +++ b/server/pkg/property/schema_field_ui.go @@ -0,0 +1,65 @@ +package property + +type SchemaFieldUI string + +const ( + SchemaFieldUIMultiline SchemaFieldUI = "multiline" + SchemaFieldUISelection SchemaFieldUI = "selection" + SchemaFieldUIColor SchemaFieldUI = "color" + SchemaFieldUIRange SchemaFieldUI = "range" + SchemaFieldUISlider SchemaFieldUI = "slider" + SchemaFieldUIImage SchemaFieldUI = "image" + SchemaFieldUIVideo SchemaFieldUI = "video" + SchemaFieldUIFile SchemaFieldUI = "file" + SchemaFieldUILayer SchemaFieldUI = "layer" + SchemaFieldUICameraPose SchemaFieldUI = "camera_pose" + SchemaFieldUIDatetTime SchemaFieldUI = "datetime" + // DON'T FORGET ADDING A NEW UI TO 
schemaFieldUIs ALSO! +) + +var ( + schemaFieldUIs = []SchemaFieldUI{ + SchemaFieldUIMultiline, + SchemaFieldUISelection, + SchemaFieldUIColor, + SchemaFieldUIRange, + SchemaFieldUISlider, + SchemaFieldUIImage, + SchemaFieldUIVideo, + SchemaFieldUIFile, + SchemaFieldUILayer, + SchemaFieldUICameraPose, + SchemaFieldUIDatetTime, + // DON'T FORGET ADDING A NEW UI HERE ALSO! + } +) + +func SchemaFieldUIFrom(ui string) SchemaFieldUI { + psfui := SchemaFieldUI(ui) + for _, u := range schemaFieldUIs { + if u == psfui { + return u + } + } + return "" +} + +func SchemaFieldUIFromRef(ui *string) *SchemaFieldUI { + if ui == nil { + return nil + } + ui2 := SchemaFieldUIFrom(*ui) + return &ui2 +} + +func (p SchemaFieldUI) String() string { + return string(p) +} + +func (p *SchemaFieldUI) StringRef() *string { + if p == nil { + return nil + } + p2 := string(*p) + return &p2 +} diff --git a/server/pkg/property/schema_field_ui_test.go b/server/pkg/property/schema_field_ui_test.go new file mode 100644 index 000000000..1025e1302 --- /dev/null +++ b/server/pkg/property/schema_field_ui_test.go @@ -0,0 +1,19 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSchemaFieldUI(t *testing.T) { + var uir *SchemaFieldUI + assert.Equal(t, SchemaFieldUI(""), SchemaFieldUIFrom("")) + assert.Equal(t, uir, SchemaFieldUIFromRef(nil)) + ui := SchemaFieldUILayer + assert.Equal(t, SchemaFieldUILayer, SchemaFieldUIFrom("layer")) + assert.Equal(t, "layer", SchemaFieldUIFrom("layer").String()) + str := "layer" + assert.Equal(t, &ui, SchemaFieldUIFromRef(&str)) + assert.Equal(t, &str, SchemaFieldUIFromRef(&str).StringRef()) +} diff --git a/server/pkg/property/schema_group.go b/server/pkg/property/schema_group.go new file mode 100644 index 000000000..7f8a89506 --- /dev/null +++ b/server/pkg/property/schema_group.go @@ -0,0 +1,111 @@ +package property + +import ( + "github.com/reearth/reearth-backend/pkg/i18n" +) + +// SchemaGroup represents a group of 
property that has some fields +type SchemaGroup struct { + id SchemaGroupID + fields []*SchemaField + list bool + isAvailableIf *Condition + title i18n.String + representativeField *FieldID +} + +// ID returns id +func (s *SchemaGroup) ID() SchemaGroupID { + if s == nil { + return SchemaGroupID("") + } + return s.id +} + +func (s *SchemaGroup) IDRef() *SchemaGroupID { + if s == nil { + return nil + } + return s.id.Ref() +} + +// Fields returns a slice of fields +func (s *SchemaGroup) Fields() []*SchemaField { + if s == nil { + return nil + } + return append([]*SchemaField{}, s.fields...) +} + +// Field returns a field whose id is specified +func (s *SchemaGroup) Field(fid FieldID) *SchemaField { + if s == nil { + return nil + } + for _, f := range s.fields { + if f.ID() == fid { + return f + } + } + return nil +} + +// FieldByPointer returns a field whose id is specified +func (s *SchemaGroup) FieldByPointer(ptr *Pointer) *SchemaField { + if s == nil { + return nil + } + fid, ok := ptr.Field() + if !ok { + return nil + } + return s.Field(fid) +} + +func (s *SchemaGroup) HasField(i FieldID) bool { + return s.Field(i) != nil +} + +// IsList returns true if this group is list +func (s *SchemaGroup) IsList() bool { + if s == nil { + return false + } + return s.list +} + +// IsAvailableIf returns condition of availability +func (s *SchemaGroup) IsAvailableIf() *Condition { + if s == nil { + return nil + } + return s.isAvailableIf.Clone() +} + +// Title returns a title of the group +func (s *SchemaGroup) Title() i18n.String { + if s == nil { + return nil + } + return s.title.Clone() +} + +// RepresentativeFieldID returns the representative field ID of the group +func (s *SchemaGroup) RepresentativeFieldID() *FieldID { + if s == nil { + return nil + } + return s.representativeField +} + +// RepresentativeField returns the representative field of the group +func (s *SchemaGroup) RepresentativeField() *SchemaField { + if s == nil || s.representativeField == nil { + return 
nil + } + return s.Field(*s.representativeField) +} + +func (s *SchemaGroup) SetTitle(t i18n.String) { + s.title = t.Clone() +} diff --git a/server/pkg/property/schema_group_builder.go b/server/pkg/property/schema_group_builder.go new file mode 100644 index 000000000..b0277026e --- /dev/null +++ b/server/pkg/property/schema_group_builder.go @@ -0,0 +1,77 @@ +package property + +import ( + "github.com/reearth/reearth-backend/pkg/i18n" +) + +type SchemaGroupBuilder struct { + p *SchemaGroup +} + +func NewSchemaGroup() *SchemaGroupBuilder { + return &SchemaGroupBuilder{ + p: &SchemaGroup{}, + } +} + +func (b *SchemaGroupBuilder) Build() (*SchemaGroup, error) { + if b.p.id == "" { + return nil, ErrInvalidID + } + return b.p, nil +} + +func (b *SchemaGroupBuilder) MustBuild() *SchemaGroup { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +func (b *SchemaGroupBuilder) ID(id SchemaGroupID) *SchemaGroupBuilder { + b.p.id = id + return b +} + +func (b *SchemaGroupBuilder) Fields(fields []*SchemaField) *SchemaGroupBuilder { + if len(fields) == 0 { + b.p.fields = nil + return b + } + + newFields := []*SchemaField{} + ids := map[FieldID]struct{}{} + for _, f := range fields { + if f == nil { + continue + } + if _, ok := ids[f.ID()]; ok { + continue + } + ids[f.ID()] = struct{}{} + newFields = append(newFields, f) + } + b.p.fields = newFields + return b +} + +func (b *SchemaGroupBuilder) IsList(list bool) *SchemaGroupBuilder { + b.p.list = list + return b +} + +func (b *SchemaGroupBuilder) IsAvailableIf(cond *Condition) *SchemaGroupBuilder { + b.p.isAvailableIf = cond.Clone() + return b +} + +func (b *SchemaGroupBuilder) Title(title i18n.String) *SchemaGroupBuilder { + b.p.title = title.Clone() + return b +} + +func (b *SchemaGroupBuilder) RepresentativeField(representativeField *FieldID) *SchemaGroupBuilder { + b.p.representativeField = representativeField.CloneRef() + return b +} diff --git a/server/pkg/property/schema_group_builder_test.go 
b/server/pkg/property/schema_group_builder_test.go new file mode 100644 index 000000000..9a537f981 --- /dev/null +++ b/server/pkg/property/schema_group_builder_test.go @@ -0,0 +1,102 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/stretchr/testify/assert" +) + +func TestSchemaGroupBuilder_Build(t *testing.T) { + gid := SchemaGroupID("xx") + sf := NewSchemaField().ID("ff").Type(ValueTypeString).MustBuild() + + type expected struct { + ID SchemaGroupID + Fields []*SchemaField + List bool + IsAvailableIf *Condition + Title i18n.String + } + + tests := []struct { + Name string + ID SchemaGroupID + Fields []*SchemaField + List bool + IsAvailableIf *Condition + Title i18n.String + Expected expected + Err error + }{ + { + Name: "fail: invalid id", + Err: ErrInvalidID, + }, + { + Name: "success", + ID: gid, + Fields: []*SchemaField{sf, nil, sf}, + List: true, + IsAvailableIf: &Condition{ + Field: "ff", + Value: ValueTypeString.ValueFrom("abc"), + }, + Title: i18n.StringFrom("tt"), + Expected: expected{ + ID: gid, + Fields: []*SchemaField{sf}, + List: true, + IsAvailableIf: &Condition{ + Field: "ff", + Value: ValueTypeString.ValueFrom("abc"), + }, + Title: i18n.StringFrom("tt"), + }, + }, + { + Name: "success: nil name", + ID: gid, + Fields: []*SchemaField{sf}, + List: true, + IsAvailableIf: &Condition{ + Field: "ff", + Value: ValueTypeString.ValueFrom("abc"), + }, + Title: i18n.StringFrom("tt"), + Expected: expected{ + ID: gid, + Fields: []*SchemaField{sf}, + List: true, + IsAvailableIf: &Condition{ + Field: "ff", + Value: ValueTypeString.ValueFrom("abc"), + }, + Title: i18n.StringFrom("tt"), + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res, err := NewSchemaGroup(). + ID(tc.ID). + Fields(tc.Fields). + IsList(tc.List). + Title(tc.Title). + IsAvailableIf(tc.IsAvailableIf). 
+ Build() + if tc.Err == nil { + assert.Equal(t, tc.Expected.IsAvailableIf, res.IsAvailableIf()) + assert.Equal(t, tc.Expected.ID, res.ID()) + assert.Equal(t, tc.Expected.Title, res.Title()) + assert.Equal(t, tc.Expected.List, res.IsList()) + assert.Equal(t, tc.Expected.Fields, res.Fields()) + } else { + assert.Equal(t, tc.Err, err) + } + }) + } +} diff --git a/server/pkg/property/schema_group_list.go b/server/pkg/property/schema_group_list.go new file mode 100644 index 000000000..195a951d2 --- /dev/null +++ b/server/pkg/property/schema_group_list.go @@ -0,0 +1,146 @@ +package property + +type SchemaGroupList struct { + groups []*SchemaGroup +} + +func NewSchemaGroupList(p []*SchemaGroup) *SchemaGroupList { + sgl := &SchemaGroupList{ + groups: append(p[:0:0], p...), + } + if len(sgl.duplicatedGroups()) > 0 { + return nil + } + return sgl +} + +func (p *SchemaGroupList) Len() int { + if p == nil { + return 0 + } + return len(p.groups) +} + +func (p *SchemaGroupList) Groups() []*SchemaGroup { + if p == nil { + return nil + } + return append(p.groups[:0:0], p.groups...) +} + +func (p *SchemaGroupList) Fields() []*SchemaField { + if p == nil { + return nil + } + + fields := []*SchemaField{} + for _, g := range p.groups { + fields = append(fields, g.Fields()...) 
+ } + return fields +} + +func (p *SchemaGroupList) GroupAndFields() []SchemaGroupAndField { + if p == nil { + return nil + } + fields := []SchemaGroupAndField{} + for _, g := range p.groups { + for _, f := range g.Fields() { + fields = append(fields, SchemaGroupAndField{Group: g, Field: f}) + } + } + return fields +} + +func (p *SchemaGroupList) Field(id FieldID) *SchemaField { + if p == nil { + return nil + } + + for _, g := range p.groups { + if f := g.Field(id); f != nil { + return f + } + } + return nil +} + +func (p *SchemaGroupList) Group(id SchemaGroupID) *SchemaGroup { + if p == nil { + return nil + } + + for _, f := range p.groups { + if f.ID() == id { + return f + } + } + return nil +} + +func (p *SchemaGroupList) GroupByField(id FieldID) *SchemaGroup { + if p == nil { + return nil + } + + for _, f := range p.groups { + if f.HasField(id) { + return f + } + } + + return nil +} + +func (p *SchemaGroupList) GroupAndField(f FieldID) *SchemaGroupAndField { + if p == nil { + return nil + } + for _, g := range p.groups { + if gf := g.Field(f); gf != nil { + return &SchemaGroupAndField{Group: g, Field: gf} + } + } + return nil +} + +func (s *SchemaGroupList) duplicatedGroups() []SchemaGroupID { + if s == nil { + return nil + } + + var duplicated []SchemaGroupID + ids := map[SchemaGroupID]struct{}{} + for _, f := range s.Groups() { + i := f.ID() + if _, ok := ids[i]; ok { + duplicated = append(duplicated, i) + } + ids[i] = struct{}{} + } + return duplicated +} + +type SchemaGroupAndField struct { + Group *SchemaGroup + Field *SchemaField +} + +func (gf SchemaGroupAndField) IsEmpty() bool { + return gf.Group == nil && gf.Field == nil +} + +func (gf SchemaGroupAndField) Pointer() *Pointer { + if gf.Group == nil && gf.Field == nil { + return nil + } + return NewPointer(gf.Group.ID().Ref(), nil, gf.Field.ID().Ref()) +} + +func (f SchemaGroupAndField) SchemaFieldPointer() SchemaFieldPointer { + return SchemaFieldPointer{ + SchemaGroup: f.Group.ID(), + Field: 
f.Field.ID(), + } +} diff --git a/server/pkg/property/schema_group_list_test.go b/server/pkg/property/schema_group_list_test.go new file mode 100644 index 000000000..12627e0fb --- /dev/null +++ b/server/pkg/property/schema_group_list_test.go @@ -0,0 +1,376 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +var ( + testSchemaGroupList1 = NewSchemaGroupList([]*SchemaGroup{testSchemaGroup1, testSchemaGroup2}) +) + +func TestNewSchemaGroupList(t *testing.T) { + type args struct { + p []*SchemaGroup + } + tests := []struct { + name string + args args + want *SchemaGroupList + }{ + { + name: "ok", + args: args{ + p: []*SchemaGroup{testSchemaGroup1, testSchemaGroup2}, + }, + want: &SchemaGroupList{groups: []*SchemaGroup{testSchemaGroup1, testSchemaGroup2}}, + }, + { + name: "duplicated groups", + args: args{ + p: []*SchemaGroup{testSchemaGroup1, testSchemaGroup1}, + }, + want: nil, + }, + { + name: "nil", + args: args{ + p: nil, + }, + want: &SchemaGroupList{groups: nil}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, NewSchemaGroupList(tt.args.p)) + }) + } +} + +func TestSchemaGroupList_Field(t *testing.T) { + tests := []struct { + name string + target *SchemaGroupList + input FieldID + want *SchemaField + }{ + { + name: "nil schema", + }, + { + name: "found", + target: testSchemaGroupList1, + input: testSchemaField1.ID(), + want: testSchemaField1, + }, + { + name: "not found", + target: testSchemaGroupList1, + input: FieldID("zz"), + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Field(tt.input)) + }) + } +} + +func TestSchemaGroupList_Group(t *testing.T) { + tests := []struct { + name string + target *SchemaGroupList + input SchemaGroupID + want *SchemaGroup + }{ + { + name: "nil schema", + target: nil, + input: testSchemaGroup1.ID(), + want: nil, + }, + { + name: "found", + 
target: testSchemaGroupList1, + input: testSchemaGroup1.ID(), + want: testSchemaGroup1, + }, + { + name: "not found", + target: testSchemaGroupList1, + input: SchemaGroupID("zz"), + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Group(tt.input)) + }) + } +} + +func TestSchemaGroupList_GroupByField(t *testing.T) { + tests := []struct { + name string + target *SchemaGroupList + input FieldID + want *SchemaGroup + }{ + { + name: "nil schema", + target: nil, + input: testSchemaField1.ID(), + want: nil, + }, + { + name: "found", + target: testSchemaGroupList1, + input: testSchemaField1.ID(), + want: testSchemaGroup1, + }, + { + name: "not found", + target: testSchemaGroupList1, + input: FieldID("zz"), + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.GroupByField(tt.input)) + }) + } +} + +func TestSchemaGroupList_GroupAndFields(t *testing.T) { + tests := []struct { + name string + target *SchemaGroupList + want []SchemaGroupAndField + }{ + { + name: "ok", + target: testSchemaGroupList1, + want: []SchemaGroupAndField{ + {Group: testSchemaGroup1, Field: testSchemaField1}, + {Group: testSchemaGroup1, Field: testSchemaField2}, + {Group: testSchemaGroup2, Field: testSchemaField3}, + }, + }, + { + name: "empty", + target: &SchemaGroupList{}, + want: []SchemaGroupAndField{}, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.GroupAndFields() + assert.Equal(t, tt.want, res) + if len(tt.want) > 0 { + for i, gf := range res { + assert.Same(t, tt.want[i].Group, gf.Group) + assert.Same(t, tt.want[i].Field, gf.Field) + } + } + }) + } +} + +func TestSchemaGroupList_GroupAndField(t *testing.T) { + type args struct { + f FieldID + } + tests := []struct { + 
name string + args args + target *SchemaGroupList + want *SchemaGroupAndField + }{ + { + name: "ok1", + target: testSchemaGroupList1, + args: args{f: testSchemaField1.ID()}, + want: &SchemaGroupAndField{Group: testSchemaGroup1, Field: testSchemaField1}, + }, + { + name: "ok2", + target: testSchemaGroupList1, + args: args{f: testSchemaField2.ID()}, + want: &SchemaGroupAndField{Group: testSchemaGroup1, Field: testSchemaField2}, + }, + { + name: "ok3", + target: testSchemaGroupList1, + args: args{f: testSchemaField3.ID()}, + want: &SchemaGroupAndField{Group: testSchemaGroup2, Field: testSchemaField3}, + }, + { + name: "not found", + target: testSchemaGroupList1, + args: args{f: "ddd"}, + want: nil, + }, + { + name: "empty", + target: &SchemaGroupList{}, + want: nil, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.GroupAndField(tt.args.f) + assert.Equal(t, tt.want, res) + if tt.want != nil { + assert.Same(t, tt.want.Group, res.Group) + assert.Same(t, tt.want.Field, res.Field) + } + }) + } +} + +func TestSchemaGroupAndField_IsEmpty(t *testing.T) { + tests := []struct { + name string + target SchemaGroupAndField + want bool + }{ + { + name: "present", + target: SchemaGroupAndField{ + Group: testSchemaGroup1, + Field: testSchemaField1, + }, + want: false, + }, + { + name: "empty", + target: SchemaGroupAndField{}, + want: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gf := SchemaGroupAndField{ + Group: tt.target.Group, + Field: tt.target.Field, + } + assert.Equal(t, tt.want, gf.IsEmpty()) + }) + } +} + +func TestSchemaGroupAndField_Pointer(t *testing.T) { + tests := []struct { + name string + target SchemaGroupAndField + want *Pointer + }{ + { + name: "ok", + target: SchemaGroupAndField{ + Group: testSchemaGroup1, + Field: testSchemaField1, + }, + want: &Pointer{ + schemaGroup: testSchemaGroup1.ID().Ref(), + 
item: nil, + field: testSchemaField1.ID().Ref(), + }, + }, + { + name: "nil group", + target: SchemaGroupAndField{ + Group: nil, + Field: testSchemaField1, + }, + want: &Pointer{ + schemaGroup: nil, + item: nil, + field: testSchemaField1.ID().Ref(), + }, + }, + { + name: "nil field", + target: SchemaGroupAndField{ + Group: testSchemaGroup1, + Field: nil, + }, + want: &Pointer{ + schemaGroup: testSchemaGroup1.ID().Ref(), + item: nil, + field: nil, + }, + }, + { + name: "empty", + target: SchemaGroupAndField{}, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.Pointer()) + }) + } +} + +func TestSchemaGroupAndField_SchemaFieldPointer(t *testing.T) { + tests := []struct { + name string + target SchemaGroupAndField + want SchemaFieldPointer + }{ + { + name: "ok", + target: SchemaGroupAndField{ + Group: testSchemaGroup1, + Field: testSchemaField1, + }, + want: SchemaFieldPointer{ + SchemaGroup: testSchemaGroup1.ID(), + Field: testSchemaField1.ID(), + }, + }, + { + name: "empty", + target: SchemaGroupAndField{}, + want: SchemaFieldPointer{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.SchemaFieldPointer()) + }) + } +} diff --git a/server/pkg/property/schema_group_test.go b/server/pkg/property/schema_group_test.go new file mode 100644 index 000000000..383d69107 --- /dev/null +++ b/server/pkg/property/schema_group_test.go @@ -0,0 +1,112 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/stretchr/testify/assert" +) + +var ( + testSchemaGroup1 = NewSchemaGroup().ID("aa").Fields([]*SchemaField{testSchemaField1, testSchemaField2}).MustBuild() + testSchemaGroup2 = NewSchemaGroup().ID("bb").Fields([]*SchemaField{testSchemaField3}).IsList(true).MustBuild() +) + +func TestSchemaGroup(t *testing.T) { + scid := SchemaGroupID("aa") + sf := 
NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + + tests := []struct { + Name string + G *SchemaGroup + Expected struct { + GIDRef *SchemaGroupID + GID SchemaGroupID + Fields []*SchemaField + Title i18n.String + IsAvailableIf *Condition + IsList bool + } + }{ + { + Name: "nil schema group", + }, + { + Name: "success", + G: NewSchemaGroup().ID(scid).Fields([]*SchemaField{sf}).MustBuild(), + Expected: struct { + GIDRef *SchemaGroupID + GID SchemaGroupID + Fields []*SchemaField + Title i18n.String + IsAvailableIf *Condition + IsList bool + }{ + GIDRef: scid.Ref(), + GID: scid, + Fields: []*SchemaField{sf}, + Title: nil, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + assert.Equal(t, tc.Expected.GID, tc.G.ID()) + assert.Equal(t, tc.Expected.GIDRef, tc.G.IDRef()) + assert.Equal(t, tc.Expected.Fields, tc.G.Fields()) + assert.Equal(t, tc.Expected.IsList, tc.G.IsList()) + assert.Equal(t, tc.Expected.IsAvailableIf, tc.G.IsAvailableIf()) + assert.Equal(t, tc.Expected.Title, tc.G.Title()) + }) + } +} + +func TestSchemaGroup_Field(t *testing.T) { + scid := SchemaGroupID("aa") + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + + tests := []struct { + Name string + G *SchemaGroup + PTR *Pointer + Input FieldID + Expected *SchemaField + }{ + { + Name: "nil schema group", + }, + { + Name: "found", + G: NewSchemaGroup().ID(scid).Fields([]*SchemaField{sf}).MustBuild(), + PTR: NewPointer(nil, nil, sf.ID().Ref()), + Input: sf.ID(), + Expected: sf, + }, + { + Name: "not found", + G: NewSchemaGroup().ID(scid).Fields([]*SchemaField{sf}).MustBuild(), + PTR: NewPointer(nil, nil, FieldID("zz").Ref()), + Input: FieldID("zz"), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.G.Field(tc.Input)) + assert.Equal(t, tc.Expected, tc.G.FieldByPointer(tc.PTR)) + assert.Equal(t, tc.Expected != nil, 
tc.G.HasField(tc.Input)) + }) + } +} + +func TestSchemaGroup_SetTitle(t *testing.T) { + sg := NewSchemaGroup().ID(SchemaGroupID("aa")).Fields([]*SchemaField{testSchemaField1}).MustBuild() + sg.SetTitle(i18n.StringFrom("ttt")) + assert.Equal(t, i18n.StringFrom("ttt"), sg.Title()) +} diff --git a/server/pkg/property/schema_list.go b/server/pkg/property/schema_list.go new file mode 100644 index 000000000..322523c2b --- /dev/null +++ b/server/pkg/property/schema_list.go @@ -0,0 +1,92 @@ +package property + +type SchemaList []*Schema + +func (l SchemaList) Find(psid SchemaID) *Schema { + for _, s := range l { + if s != nil && s.ID().Equal(psid) { + return s + } + } + return nil +} + +func (l SchemaList) Map() SchemaMap { + return SchemaMapFrom(l) +} + +func (l SchemaList) Loader() SchemaLoader { + return SchemaLoaderFromMap(l.Map()) +} + +func (l SchemaList) Concat(m SchemaList) SchemaList { + return append(l, m...) +} + +func (l SchemaList) MapToIDs(ids []SchemaID) SchemaList { + results := make(SchemaList, 0, len(ids)) + for _, id := range ids { + results = append(results, l.Find(id)) + } + return results +} + +type SchemaMap map[SchemaID]*Schema + +func SchemaMapFrom(l []*Schema) SchemaMap { + m := make(SchemaMap, len(l)) + m.Add(l...) + return m +} + +func (m SchemaMap) Add(schemas ...*Schema) { + if m == nil { + return + } + for _, p := range schemas { + if p == nil { + continue + } + m[p.ID()] = p + } +} + +func (m SchemaMap) List() SchemaList { + if m == nil { + return nil + } + list := make(SchemaList, 0, len(m)) + for _, l := range m { + list = append(list, l) + } + return list +} + +func (m SchemaMap) Clone() SchemaMap { + if m == nil { + return SchemaMap{} + } + m2 := make(SchemaMap, len(m)) + for k, v := range m { + m2[k] = v + } + return m2 +} + +func (m SchemaMap) Merge(m2 SchemaMap) SchemaMap { + if m == nil { + return nil + } + m3 := m.Clone() + if m2 == nil { + return m3 + } + + m3.Add(m2.List()...) 
+ + return m3 +} + +func (m SchemaMap) Loader() SchemaLoader { + return SchemaLoaderFromMap(m) +} diff --git a/server/pkg/property/schema_list_test.go b/server/pkg/property/schema_list_test.go new file mode 100644 index 000000000..cb22dc673 --- /dev/null +++ b/server/pkg/property/schema_list_test.go @@ -0,0 +1,48 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSchemaList_Find(t *testing.T) { + p1 := &Schema{id: MustSchemaID("foo~1.0.0/a")} + p2 := &Schema{id: MustSchemaID("bar~1.0.0/a")} + assert.Equal(t, p1, SchemaList{p1, p2}.Find(p1.ID())) + assert.Nil(t, SchemaList{p1, p2}.Find(MustSchemaID("hoge~1.0.0/a"))) + assert.Nil(t, SchemaList(nil).Find(p1.ID())) +} + +func TestSchemaList_Concat(t *testing.T) { + p1 := &Schema{id: MustSchemaID("foo~1.0.0/a")} + p2 := &Schema{id: MustSchemaID("bar~1.0.0/a")} + assert.Equal(t, SchemaList{p1, p2, p2}, SchemaList{p1, p2}.Concat(SchemaList{p2})) + assert.Equal(t, SchemaList{p1}, SchemaList(nil).Concat(SchemaList{p1})) + assert.Equal(t, SchemaList{p1}, SchemaList{p1}.Concat(nil)) +} + +func TestSchemaList_Map(t *testing.T) { + p1 := &Schema{id: MustSchemaID("foo~1.0.0/a")} + p2 := &Schema{id: MustSchemaID("bar~1.0.0/a")} + assert.Equal(t, SchemaMap{p1.ID(): p1, p2.ID(): p2}, SchemaList{p1, p2}.Map()) + assert.Equal(t, SchemaMap{}, SchemaList(nil).Map()) +} + +func TestSchemaList_MapToIDs(t *testing.T) { + p1 := &Schema{id: MustSchemaID("foo~1.0.0/a")} + p2 := &Schema{id: MustSchemaID("bar~1.0.0/a")} + assert.Equal(t, SchemaList{nil, p2}, SchemaList{p1, p2}.MapToIDs([]SchemaID{MustSchemaID("hoge~1.0.0/a"), p2.ID()})) + assert.Equal(t, SchemaList{}, SchemaList{p1, p2}.MapToIDs(nil)) + assert.Equal(t, SchemaList{nil}, SchemaList(nil).MapToIDs([]SchemaID{p1.ID()})) +} + +func TestSchemaMap_List(t *testing.T) { + p1 := &Schema{id: MustSchemaID("foo~1.0.0/a")} + p2 := &Schema{id: MustSchemaID("bar~1.0.0/a")} + list := SchemaMap{p1.ID(): p1, p2.ID(): p2}.List() + assert.Len(t, 
list, 2) + assert.Contains(t, list, p1) + assert.Contains(t, list, p2) + assert.Nil(t, SchemaMap(nil).List()) +} diff --git a/server/pkg/property/schema_pointer.go b/server/pkg/property/schema_pointer.go new file mode 100644 index 000000000..8d0431c1e --- /dev/null +++ b/server/pkg/property/schema_pointer.go @@ -0,0 +1,18 @@ +package property + +type SchemaFieldPointer struct { + SchemaGroup SchemaGroupID + Field FieldID +} + +func (p SchemaFieldPointer) Pointer() *Pointer { + return PointFieldBySchemaGroup(p.SchemaGroup, p.Field) +} + +func (p *SchemaFieldPointer) Clone() *SchemaFieldPointer { + if p == nil { + return p + } + p2 := *p + return &p2 +} diff --git a/server/pkg/property/schema_pointer_test.go b/server/pkg/property/schema_pointer_test.go new file mode 100644 index 000000000..60643e153 --- /dev/null +++ b/server/pkg/property/schema_pointer_test.go @@ -0,0 +1,36 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSchemaFieldPointer_Pointer(t *testing.T) { + tests := []struct { + name string + target *SchemaFieldPointer + want *Pointer + }{ + { + name: "ok", + target: &SchemaFieldPointer{ + SchemaGroup: SchemaGroupID("a"), + Field: FieldID("b"), + }, + want: &Pointer{ + schemaGroup: SchemaGroupID("a").Ref(), + item: nil, + field: FieldID("b").Ref(), + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Pointer()) + }) + } +} diff --git a/server/pkg/property/schema_test.go b/server/pkg/property/schema_test.go new file mode 100644 index 000000000..4e5739075 --- /dev/null +++ b/server/pkg/property/schema_test.go @@ -0,0 +1,69 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +var ( + testSchema1 = NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups( + NewSchemaGroupList([]*SchemaGroup{testSchemaGroup1, testSchemaGroup2}), + 
).MustBuild() +) + +func TestLinkableField_Validate(t *testing.T) { + sid := id.MustPropertySchemaID("xx~1.0.0/aa") + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID("aaa").Fields([]*SchemaField{sf}).MustBuild() + + tests := []struct { + Name string + S *Schema + LF LinkableFields + Expected bool + }{ + { + Name: "nil schema", + S: nil, + LF: LinkableFields{}, + Expected: false, + }, + { + Name: "invalid: URL", + S: NewSchema().ID(sid).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), + LF: LinkableFields{ + URL: &SchemaFieldPointer{ + Field: FieldID("xx"), + }, + }, + Expected: false, + }, + { + Name: "invalid: Lng", + S: NewSchema().ID(sid).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), + LF: LinkableFields{ + LatLng: &SchemaFieldPointer{ + Field: FieldID("xx"), + }, + }, + Expected: false, + }, + { + Name: "empty", + S: NewSchema().ID(sid).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), + LF: LinkableFields{}, + Expected: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.LF.Validate(tt.S) + assert.Equal(t, tt.Expected, res) + }) + } +} diff --git a/server/pkg/property/sealed.go b/server/pkg/property/sealed.go new file mode 100644 index 000000000..43c3bc52c --- /dev/null +++ b/server/pkg/property/sealed.go @@ -0,0 +1,263 @@ +package property + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/dataset" +) + +type Sealed struct { + Original *ID + Parent *ID + Schema SchemaID + LinkedDataset *DatasetID + Items []*SealedItem +} + +type SealedItem struct { + Original *ItemID + Parent *ItemID + SchemaGroup SchemaGroupID + LinkedDataset *DatasetID + Fields []*SealedField + Groups []*SealedItem +} + +type SealedField struct { + ID FieldID + Val *ValueAndDatasetValue +} + +func (f *SealedField) Value() *Value { + if f == nil { + return nil + } + return f.Val.Value() +} + +func Seal(ctx context.Context, p 
*Merged, d dataset.GraphLoader) (*Sealed, error) { + if p == nil { + return nil, nil + } + items := make([]*SealedItem, 0, len(p.Groups)) + for _, g := range p.Groups { + i, err := sealedItemFrom(ctx, g, d) + if err != nil { + return nil, err + } + items = append(items, i) + } + + return &Sealed{ + Original: p.Original.CopyRef(), + Parent: p.Parent.CopyRef(), + Schema: p.Schema, + LinkedDataset: p.LinkedDataset.CopyRef(), + Items: items, + }, nil +} + +func SealProperty(ctx context.Context, p *Property) *Sealed { + if p == nil { + return nil + } + m := Merge(p, nil, nil) + s, _ := Seal(ctx, m, nil) + return s +} + +func sealedItemFrom(ctx context.Context, g *MergedGroup, d dataset.GraphLoader) (item *SealedItem, err error) { + if g == nil { + return + } + + item = &SealedItem{ + Original: g.Original.CopyRef(), + Parent: g.Parent.CopyRef(), + SchemaGroup: g.SchemaGroup, + LinkedDataset: g.LinkedDataset.CopyRef(), + } + + if len(g.Groups) > 0 { + item.Groups, err = sealedGroupList(ctx, g.Groups, d) + } else if len(g.Fields) > 0 { + item.Fields, err = sealedGroup(ctx, g.Fields, d) + } + + return +} + +func sealedGroupList(ctx context.Context, gl []*MergedGroup, d dataset.GraphLoader) ([]*SealedItem, error) { + res := make([]*SealedItem, 0, len(gl)) + for _, g := range gl { + sg, err := sealedItemFrom(ctx, g, d) + if err != nil { + return nil, err + } + res = append(res, sg) + } + return res, nil +} + +func sealedGroup(ctx context.Context, fields []*MergedField, d dataset.GraphLoader) ([]*SealedField, error) { + res := []*SealedField{} + for _, f := range fields { + dv, err := f.DatasetValue(ctx, d) + if err != nil { + return nil, err + } + + if val := NewValueAndDatasetValue(f.Type, dv.Clone(), f.Value.Clone()); val != nil { + res = append(res, &SealedField{ + ID: f.ID, + Val: val, + }) + } + } + return res, nil +} + +func (s *Sealed) Interface() map[string]interface{} { + if s == nil { + return nil + } + + res := map[string]interface{}{} + for _, item := range 
s.Items { + i := item.Interface() + if i != nil { + res[item.SchemaGroup.String()] = i + } + } + + return res +} + +func (s *SealedItem) Interface() interface{} { + if s == nil { + return nil + } + + if len(s.Groups) > 0 { + items := make([]map[string]interface{}, 0, len(s.Groups)) + for _, g := range s.Groups { + i := sealedFieldsInterface(g.Fields) + if g.Original != nil { + i["id"] = g.Original.String() + } + items = append(items, i) + } + return items + } + + return sealedFieldsInterface(s.Fields) +} + +func sealedFieldsInterface(fields []*SealedField) map[string]interface{} { + item := map[string]interface{}{} + + for _, f := range fields { + item[f.ID.String()] = f.Val.Value().Interface() + } + + return item +} + +func (s *Sealed) Item(i ItemID) *SealedItem { + if s == nil { + return nil + } + for _, item := range s.Items { + if item.Match(i) { + return item + } + if g := item.Group(i); g != nil { + return g + } + } + return nil +} + +func (s *Sealed) ItemBy(ptr *Pointer) *SealedItem { + if s == nil || ptr == nil { + return nil + } + if sg, ok := ptr.ItemBySchemaGroup(); ok { + return s.ItemBySchemaGroup(sg) + } + if i, ok := ptr.Item(); ok { + return s.Item(i) + } + return nil +} + +func (s *Sealed) ItemBySchemaGroup(i SchemaGroupID) *SealedItem { + if s == nil { + return nil + } + for _, item := range s.Items { + if item.SchemaGroup == i { + return item + } + } + return nil +} + +func (s *Sealed) Field(id FieldID) *SealedField { + if s == nil { + return nil + } + for _, i := range s.Items { + if f := i.Field(id); f != nil { + return f + } + } + return nil +} + +func (s *Sealed) FieldBy(ptr *Pointer) *SealedField { + if s == nil || ptr == nil { + return nil + } + if sg, f, ok := ptr.FieldBySchemaGroup(); ok { + return s.ItemBySchemaGroup(sg).Field(f) + } + if i, f, ok := ptr.FieldByItem(); ok { + return s.Item(i).Field(f) + } + if f, ok := ptr.Field(); ok { + return s.Field(f) + } + return nil +} + +func (s *SealedItem) Match(id ItemID) bool { + if s == nil 
{ + return false + } + return s.Original != nil && *s.Original == id || s.Parent != nil && *s.Parent == id +} + +func (s *SealedItem) Group(id ItemID) *SealedItem { + if s == nil { + return nil + } + for _, g := range s.Groups { + if g.Match(id) { + return g + } + } + return nil +} + +func (s *SealedItem) Field(id FieldID) *SealedField { + if s == nil { + return nil + } + for _, f := range s.Fields { + if f.ID == id { + return f + } + } + return nil +} diff --git a/server/pkg/property/sealed_test.go b/server/pkg/property/sealed_test.go new file mode 100644 index 000000000..4eb471c74 --- /dev/null +++ b/server/pkg/property/sealed_test.go @@ -0,0 +1,1063 @@ +package property + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/stretchr/testify/assert" +) + +var ( + sid = NewSceneID() + ds = NewDatasetSchemaID() + df = NewDatasetFieldID() + d = NewDatasetID() + opid = NewID() + ppid = NewID() + psid = MustSchemaID("hoge~0.1.0/fff") + psiid1 = SchemaGroupID("x") + psiid2 = SchemaGroupID("y") + i1id = NewItemID() + i2id = NewItemID() + i3id = NewItemID() + i4id = NewItemID() + i5id = NewItemID() +) + +func TestSeal(t *testing.T) { + tests := []struct { + Name string + MD *Merged + DSGL dataset.GraphLoader + Expected *Sealed + Err error + }{ + { + Name: "nil group", + }, + { + Name: "seal", + MD: &Merged{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: FieldID("a"), + Value: ValueTypeString.ValueFrom("a"), + Type: ValueTypeString, + }, + { + ID: FieldID("b"), + Value: ValueTypeString.ValueFrom("b"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: 
&i4id, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: FieldID("a"), + Value: ValueTypeString.ValueFrom("aaa"), + Type: ValueTypeString, + }, + { + ID: FieldID("b"), + Value: ValueTypeString.ValueFrom("aaa"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + }, + }, + }, + }, + DSGL: dataset.GraphLoaderFromMap(map[DatasetID]*dataset.Dataset{ + d: dataset.New().Scene(sid).ID(d).Schema(ds).Fields([]*dataset.Field{ + dataset.NewField(df, dataset.ValueTypeString.ValueFrom("bbb"), ""), + }).MustBuild(), + }), + Expected: &Sealed{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Items: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), + }, + }, + }, + }, + }, + Err: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res, err := Seal(context.Background(), tc.MD, tc.DSGL) + assert.Equal(t, tc.Expected, res) + assert.Nil(t, err) + }) + } +} + +func TestSealProperty(t *testing.T) { + pid := NewID() + ps := MustSchemaID("xxx~1.1.1/aa") + + tests := []struct { + Name string + Input *Property + Expected *Sealed + }{ + { + 
Name: "nil property", + }, + { + Name: "seal property", + Input: New().ID(pid).Scene(NewSceneID()).Schema(ps).MustBuild(), + Expected: &Sealed{ + Original: pid.Ref(), + Parent: nil, + Schema: ps, + LinkedDataset: nil, + Items: []*SealedItem{}, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := SealProperty(context.Background(), tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestSealedItemFrom(t *testing.T) { + + tests := []struct { + Name string + MG *MergedGroup + DSGL dataset.GraphLoader + Expected *SealedItem + Err error + }{ + { + Name: "nil group", + }, + { + Name: "groups != nil", + MG: &MergedGroup{ + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: FieldID("a"), + Value: ValueTypeString.ValueFrom("a"), + Type: ValueTypeString, + }, + { + ID: FieldID("b"), + Value: ValueTypeString.ValueFrom("b"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + }, + }, + }, + }, + DSGL: dataset.GraphLoaderFromMap(map[DatasetID]*dataset.Dataset{ + d: dataset.New().Scene(sid).ID(d).Schema(ds).Fields([]*dataset.Field{ + dataset.NewField(df, dataset.ValueTypeString.ValueFrom("bbb"), ""), + }).MustBuild(), + }), + Expected: &SealedItem{ + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), + }, + }, + }, + }, + }, + Err: nil, + }, + { + Name: "groups == nil", + MG: &MergedGroup{ + SchemaGroup: psiid1, + 
Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: FieldID("a"), + Value: ValueTypeString.ValueFrom("aaa"), + Type: ValueTypeString, + }, + { + ID: FieldID("b"), + Value: ValueTypeString.ValueFrom("aaa"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + }, + }, + }, + }, + DSGL: dataset.GraphLoaderFromMap(map[DatasetID]*dataset.Dataset{ + d: dataset.New().Scene(sid).ID(d).Schema(ds).Fields([]*dataset.Field{ + dataset.NewField(df, dataset.ValueTypeString.ValueFrom("bbb"), ""), + }).MustBuild(), + }), + Expected: &SealedItem{ + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), + }, + }, + }, + }, + }, + Err: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res, err := sealedItemFrom(context.Background(), tc.MG, tc.DSGL) + assert.Equal(t, tc.Expected, res) + assert.Nil(t, err) + }) + } +} + +func TestSealed_Interface(t *testing.T) { + + tests := []struct { + Name string + S *Sealed + Expected map[string]interface{} + }{ + { + Name: "nil sealed", + }, + { + Name: "get sealed interface", + S: &Sealed{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Items: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + 
{ + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), + }, + }, + }, + }, + }, + Expected: map[string]interface{}{ + "x": []map[string]interface{}{ + { + "a": "a", + "b": "bbb", + "id": i5id.String(), + }, + }, + "y": map[string]interface{}{ + "a": "aaa", + "b": "bbb", + }, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.S.Interface() + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestSealedItem_Match(t *testing.T) { + tests := []struct { + Name string + SI *SealedItem + Input ItemID + Expected bool + }{ + { + Name: "nil sealed", + }, + { + Name: "", + SI: &SealedItem{ + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), + }, + }, + }, + }, + }, + Input: i2id, + Expected: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.SI.Match(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func 
TestSealed_ItemBy(t *testing.T) { + + tests := []struct { + Name string + S *Sealed + Input *Pointer + Expected *SealedItem + }{ + { + Name: "nil sealed", + }, + { + Name: "get group", + S: &Sealed{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Items: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("b"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), + }, + }, + }, + }, + }, + Input: NewPointer(psiid1.Ref(), i1id.Ref(), FieldID("a").Ref()), + Expected: &SealedItem{ + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), + }, + }, + }, + }, + }, + }, + { + Name: "get item", + S: &Sealed{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Items: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + 
LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), + }, + }, + }, + }, + }, + Input: NewPointer(nil, i1id.Ref(), FieldID("a").Ref()), + Expected: &SealedItem{ + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), + }, + }, + }, + }, + }, + }, + { + Name: "nil ptr sg", + S: &Sealed{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Items: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + 
ValueTypeString.ValueFrom("b"), + ), + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), + }, + }, + }, + }, + }, + Input: NewPointer(nil, nil, FieldID("a").Ref()), + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.S.ItemBy(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestSealed_FieldBy(t *testing.T) { + + tests := []struct { + Name string + S *Sealed + Input *Pointer + Expected *SealedField + }{ + { + Name: "nil sealed", + }, + { + Name: "get group", + S: &Sealed{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Items: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), + }, + }, + }, + }, + }, + Input: NewPointer(psiid1.Ref(), i1id.Ref(), FieldID("a").Ref()), 
+ Expected: &SealedField{ + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), + }, + }, + { + Name: "get item", + S: &Sealed{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Items: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("b"), + ValueTypeString.ValueFrom("bbb"), + ), + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), + }, + }, + }, + }, + }, + Input: NewPointer(nil, i3id.Ref(), FieldID("a").Ref()), + Expected: &SealedField{ + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), + }, + }, + { + Name: "nil ptr sg", + S: &Sealed{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Items: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("b"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + 
ValueTypeString.ValueFrom("b"), + ), + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), + }, + { + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), + }, + }, + }, + }, + }, + Input: NewPointer(nil, nil, FieldID("a").Ref()), + Expected: &SealedField{ + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.S.FieldBy(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} diff --git a/server/pkg/property/validator.go b/server/pkg/property/validator.go new file mode 100644 index 000000000..59b05da72 --- /dev/null +++ b/server/pkg/property/validator.go @@ -0,0 +1,28 @@ +package property + +import ( + "context" + "fmt" +) + +type Validator struct { + SchemaLoader SchemaLoader +} + +func (v Validator) Validate(ctx context.Context, properties List) error { + schemaIDs := properties.Schemas() + schemas, err := v.SchemaLoader(ctx, schemaIDs...) 
+ if err != nil { + return err + } + schemaMap := schemas.Map() + + for _, p := range properties { + schema := schemaMap[p.Schema()] + if err := p.ValidateSchema(schema); err != nil { + return fmt.Errorf("invalid property: %s (%s): %w", p.ID(), p.Schema(), err) + } + } + + return nil +} diff --git a/server/pkg/property/value.go b/server/pkg/property/value.go new file mode 100644 index 000000000..2d10acc0e --- /dev/null +++ b/server/pkg/property/value.go @@ -0,0 +1,244 @@ +package property + +import ( + "net/url" + + "github.com/reearth/reearth-backend/pkg/value" +) + +type LatLng = value.LatLng +type LatLngHeight = value.LatLngHeight +type Coordinates = value.Coordinates +type Rect = value.Rect +type Polygon = value.Polygon + +type ValueType value.Type + +var ( + ValueTypeUnknown = ValueType(value.TypeUnknown) + ValueTypeBool = ValueType(value.TypeBool) + ValueTypeNumber = ValueType(value.TypeNumber) + ValueTypeString = ValueType(value.TypeString) + ValueTypeRef = ValueType(value.TypeRef) + ValueTypeURL = ValueType(value.TypeURL) + ValueTypeLatLng = ValueType(value.TypeLatLng) + ValueTypeLatLngHeight = ValueType(value.TypeLatLngHeight) + ValueTypeCoordinates = ValueType(value.TypeCoordinates) + ValueTypePolygon = ValueType(value.TypePolygon) + ValueTypeRect = ValueType(value.TypeRect) +) + +var types = value.TypePropertyMap{ + value.Type(ValueTypeTypography): &typePropertyTypography{}, + value.Type(ValueTypeCamera): &typePropertyCamera{}, +} + +func (vt ValueType) Valid() bool { + if _, ok := types[value.Type(vt)]; ok { + return true + } + return value.Type(vt).Default() +} + +func (t ValueType) Default() bool { + if _, ok := types[value.Type(t)]; ok { + return true + } + return value.Type(t).Default() +} + +func (vt ValueType) ValueFrom(i interface{}) *Value { + v := value.Type(vt).ValueFrom(i, types) + if v == nil { + return nil + } + return &Value{v: *v} +} + +func (vt ValueType) MustBeValue(i interface{}) *Value { + if v := vt.ValueFrom(i); v != nil { + return 
v + } + panic("invalid value") +} + +func (vt ValueType) None() *OptionalValue { + return NewOptionalValue(vt, nil) +} + +type Value struct { + v value.Value +} + +func (v *Value) IsEmpty() bool { + return v == nil || v.v.IsEmpty() +} + +func (v *Value) Clone() *Value { + if v == nil { + return nil + } + vv := v.v.Clone() + if vv == nil { + return nil + } + return &Value{v: *vv} +} + +func (v *Value) Some() *OptionalValue { + return OptionalValueFrom(v) +} + +func (v *Value) Type() ValueType { + if v == nil { + return ValueType(value.TypeUnknown) + } + return ValueType(v.v.Type()) +} + +func (v *Value) Value() interface{} { + if v == nil { + return nil + } + return v.v.Value() +} + +func (v *Value) Interface() interface{} { + if v == nil { + return nil + } + return v.v.Interface() +} + +func (v *Value) Cast(vt ValueType) *Value { + if v == nil { + return nil + } + nv := v.v.Cast(value.Type(vt), types) + if nv == nil { + return nil + } + return &Value{v: *nv} +} + +func (v *Value) ValueBool() *bool { + if v == nil { + return nil + } + vv, ok := v.v.ValueBool() + if ok { + return &vv + } + return nil +} + +func (v *Value) ValueNumber() *float64 { + if v == nil { + return nil + } + vv, ok := v.v.ValueNumber() + if ok { + return &vv + } + return nil +} + +func (v *Value) ValueString() *string { + if v == nil { + return nil + } + vv, ok := v.v.ValueString() + if ok { + return &vv + } + return nil +} + +func (v *Value) ValueRef() *string { + if v == nil { + return nil + } + vv, ok := v.v.ValueRef() + if ok { + return &vv + } + return nil +} + +func (v *Value) ValueURL() *url.URL { + if v == nil { + return nil + } + vv, ok := v.v.ValueURL() + if ok { + return vv + } + return nil +} + +func (v *Value) ValueLatLng() *LatLng { + if v == nil { + return nil + } + vv, ok := v.v.ValueLatLng() + if ok { + return &vv + } + return nil +} + +func (v *Value) ValueLatLngHeight() *LatLngHeight { + if v == nil { + return nil + } + vv, ok := v.v.ValueLatLngHeight() + if ok { + return &vv 
+ } + return nil +} + +func (v *Value) ValueCoordinates() *Coordinates { + if v == nil { + return nil + } + vv, ok := v.v.ValueCoordinates() + if ok { + return &vv + } + return nil +} + +func (v *Value) ValueRect() *Rect { + if v == nil { + return nil + } + vv, ok := v.v.ValueRect() + if ok { + return &vv + } + return nil +} + +func (v *Value) ValuePolygon() *Polygon { + if v == nil { + return nil + } + vv, ok := v.v.ValuePolygon() + if ok { + return &vv + } + return nil +} + +func ValueFromStringOrNumber(s string) *Value { + if s == "true" || s == "false" || s == "TRUE" || s == "FALSE" || s == "True" || s == "False" { + return ValueTypeBool.ValueFrom(s) + } + + if v := ValueTypeNumber.ValueFrom(s); v != nil { + return v + } + + return ValueTypeString.ValueFrom(s) +} diff --git a/server/pkg/property/value_camera.go b/server/pkg/property/value_camera.go new file mode 100644 index 000000000..378273c7d --- /dev/null +++ b/server/pkg/property/value_camera.go @@ -0,0 +1,70 @@ +package property + +import ( + "github.com/mitchellh/mapstructure" +) + +var ValueTypeCamera = ValueType("camera") + +type Camera struct { + Lat float64 `json:"lat" mapstructure:"lat"` + Lng float64 `json:"lng" mapstructure:"lng"` + Altitude float64 `json:"altitude" mapstructure:"altitude"` + Heading float64 `json:"heading" mapstructure:"heading"` + Pitch float64 `json:"pitch" mapstructure:"pitch"` + Roll float64 `json:"roll" mapstructure:"roll"` + FOV float64 `json:"fov" mapstructure:"fov"` +} + +func (c *Camera) Clone() *Camera { + if c == nil { + return nil + } + return &Camera{ + Lat: c.Lat, + Lng: c.Lng, + Altitude: c.Altitude, + Heading: c.Heading, + Pitch: c.Pitch, + Roll: c.Roll, + FOV: c.FOV, + } +} + +type typePropertyCamera struct{} + +func (*typePropertyCamera) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(Camera); ok { + return v, true + } + + if v, ok := i.(*Camera); ok { + if v != nil { + return *v, true + } + return nil, false + } + + v := Camera{} + if err := 
mapstructure.Decode(i, &v); err == nil { + return v, true + } + return nil, false +} + +func (*typePropertyCamera) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*typePropertyCamera) Validate(i interface{}) bool { + _, ok := i.(Camera) + return ok +} + +func (v *Value) ValueCamera() (vv Camera, ok bool) { + if v == nil { + return + } + vv, ok = v.Value().(Camera) + return +} diff --git a/server/pkg/property/value_camera_test.go b/server/pkg/property/value_camera_test.go new file mode 100644 index 000000000..75e060f7f --- /dev/null +++ b/server/pkg/property/value_camera_test.go @@ -0,0 +1,51 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestCamera_Clone(t *testing.T) { + tests := []struct { + Name string + Camera, Expected *Camera + }{ + { + Name: "nil Camera", + }, + { + Name: "cloned", + Camera: &Camera{ + Lat: 1, + Lng: 1, + Altitude: 2, + Heading: 4, + Pitch: 5, + Roll: 6, + FOV: 7, + }, + Expected: &Camera{ + Lat: 1, + Lng: 1, + Altitude: 2, + Heading: 4, + Pitch: 5, + Roll: 6, + FOV: 7, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.Camera.Clone() + assert.Equal(t, tc.Expected, res) + if tc.Expected != nil { + assert.NotSame(t, tc.Expected, res) + } + }) + } +} diff --git a/server/pkg/property/value_dataset.go b/server/pkg/property/value_dataset.go new file mode 100644 index 000000000..261307257 --- /dev/null +++ b/server/pkg/property/value_dataset.go @@ -0,0 +1,67 @@ +package property + +import ( + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/value" +) + +type ValueAndDatasetValue struct { + t ValueType + d *dataset.Value + p *Value +} + +func NewValueAndDatasetValue(ty ValueType, d *dataset.Value, p *Value) *ValueAndDatasetValue { + if !ty.Valid() { + return nil + } + + if d != nil && ValueType(d.Type()) != ty { + d = d.Cast(dataset.ValueType(ty)) + } + + if p != nil 
&& p.Type() != ty { + p = p.Cast(ty) + } + + return &ValueAndDatasetValue{ + t: ty, + d: d, + p: p, + } +} + +func (v *ValueAndDatasetValue) Type() ValueType { + if v == nil { + return ValueTypeUnknown + } + return v.t +} + +func (v *ValueAndDatasetValue) DatasetValue() *dataset.Value { + if v == nil || v.t == ValueTypeUnknown { + return nil + } + return v.d +} + +func (v *ValueAndDatasetValue) PropertyValue() *Value { + if v == nil || v.t == ValueTypeUnknown { + return nil + } + return v.p +} + +func (v *ValueAndDatasetValue) Value() *Value { + if v == nil || v.t == ValueTypeUnknown { + return nil + } + if v.d != nil { + return valueFromDataset(v.d) + } + return v.p +} + +func valueFromDataset(v *dataset.Value) *Value { + return ValueType(value.Type(v.Type())).ValueFrom(v.Value()) +} diff --git a/server/pkg/property/value_dataset_test.go b/server/pkg/property/value_dataset_test.go new file mode 100644 index 000000000..a701cea66 --- /dev/null +++ b/server/pkg/property/value_dataset_test.go @@ -0,0 +1,319 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/stretchr/testify/assert" +) + +func TestNewValueAndDatasetValue(t *testing.T) { + type args struct { + ty ValueType + d *dataset.Value + p *Value + } + + tests := []struct { + name string + args args + want *ValueAndDatasetValue + }{ + { + name: "ok", + args: args{ + ty: ValueTypeBool, + d: dataset.ValueTypeBool.MustBeValue(false), + p: ValueTypeBool.MustBeValue(true), + }, + want: &ValueAndDatasetValue{ + t: ValueTypeBool, + d: dataset.ValueTypeBool.ValueFrom(false), + p: ValueTypeBool.MustBeValue(true), + }, + }, + { + name: "different types 1", + args: args{ + ty: ValueTypeURL, + d: dataset.ValueTypeString.MustBeValue("https://reearth.io"), + p: nil, + }, + want: &ValueAndDatasetValue{ + t: ValueTypeURL, + d: dataset.ValueTypeURL.MustBeValue("https://reearth.io"), + p: nil, + }, + }, + { + name: "different types 3", + args: args{ + ty: ValueTypeBool, + 
d: dataset.ValueTypeBool.MustBeValue(false), + p: ValueTypeString.MustBeValue("true"), + }, + want: &ValueAndDatasetValue{ + t: ValueTypeBool, + d: dataset.ValueTypeBool.ValueFrom(false), + p: ValueTypeBool.MustBeValue(true), + }, + }, + { + name: "different types 2", + args: args{ + ty: ValueTypeBool, + d: dataset.ValueTypeString.ValueFrom("false"), + p: ValueTypeBool.MustBeValue(true), + }, + want: &ValueAndDatasetValue{ + t: ValueTypeBool, + d: dataset.ValueTypeBool.ValueFrom(false), + p: ValueTypeBool.MustBeValue(true), + }, + }, + { + name: "invalid type", + args: args{ + ty: ValueType("foobar"), + d: dataset.ValueTypeBool.ValueFrom(false), + p: ValueTypeBool.MustBeValue(true), + }, + want: nil, + }, + { + name: "nil dataset value", + args: args{ + ty: ValueTypeBool, + d: nil, + p: ValueTypeBool.MustBeValue(false), + }, + want: &ValueAndDatasetValue{ + t: ValueTypeBool, + d: nil, + p: ValueTypeBool.MustBeValue(false), + }, + }, + { + name: "nil property value", + args: args{ + ty: ValueTypeBool, + d: dataset.ValueTypeBool.MustBeValue(false), + p: nil, + }, + want: &ValueAndDatasetValue{ + t: ValueTypeBool, + d: dataset.ValueTypeBool.MustBeValue(false), + p: nil, + }, + }, + { + name: "nil value", + args: args{ + ty: ValueTypeBool, + d: nil, + p: nil, + }, + want: &ValueAndDatasetValue{ + t: ValueTypeBool, + d: nil, + p: nil, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, NewValueAndDatasetValue(tt.args.ty, tt.args.d, tt.args.p)) + }) + } +} + +func TestValueAndDatasetValue_Type(t *testing.T) { + tests := []struct { + name string + target *ValueAndDatasetValue + want ValueType + }{ + { + name: "ok", + target: &ValueAndDatasetValue{t: ValueTypeString}, + want: ValueTypeString, + }, + { + name: "empty", + target: &ValueAndDatasetValue{}, + want: ValueTypeUnknown, + }, + { + name: "nil", + target: nil, + want: ValueTypeUnknown, + }, + } + + for _, tt := range tests { + tt := 
 tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Type()) + }) + } +} + +func TestValueAndDatasetValue_DatasetValue(t *testing.T) { + tests := []struct { + name string + target *ValueAndDatasetValue + want *dataset.Value + }{ + { + name: "dataset only", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + d: dataset.ValueTypeString.ValueFrom("foo"), + }, + want: dataset.ValueTypeString.ValueFrom("foo"), + }, + { + name: "property only", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + p: ValueTypeString.ValueFrom("bar"), + }, + want: nil, + }, + { + name: "dataset and property", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + d: dataset.ValueTypeString.ValueFrom("foo"), + p: ValueTypeString.ValueFrom("bar"), + }, + want: dataset.ValueTypeString.ValueFrom("foo"), + }, + { + name: "empty", + target: &ValueAndDatasetValue{}, + want: nil, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.DatasetValue()) + }) + } +} + +func TestValueAndDatasetValue_PropertyValue(t *testing.T) { + tests := []struct { + name string + target *ValueAndDatasetValue + want *Value + }{ + { + name: "dataset only", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + d: dataset.ValueTypeString.ValueFrom("foo"), + }, + want: nil, + }, + { + name: "property only", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + p: ValueTypeString.ValueFrom("bar"), + }, + want: ValueTypeString.ValueFrom("bar"), + }, + { + name: "dataset and property", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + d: dataset.ValueTypeString.ValueFrom("foo"), + p: ValueTypeString.ValueFrom("bar"), + }, + want: ValueTypeString.ValueFrom("bar"), + }, + { + name: "empty", + target: &ValueAndDatasetValue{}, + want: nil, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { 
+ tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.PropertyValue()) + }) + } +} + +func TestValueAndDatasetValue_Value(t *testing.T) { + tests := []struct { + name string + target *ValueAndDatasetValue + want *Value + }{ + { + name: "dataset only", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + d: dataset.ValueTypeString.MustBeValue("foo"), + }, + want: ValueTypeString.MustBeValue("foo"), + }, + { + name: "property only", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + p: ValueTypeString.MustBeValue("bar"), + }, + want: ValueTypeString.MustBeValue("bar"), + }, + { + name: "dataset and property", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + d: dataset.ValueTypeString.MustBeValue("foo"), + p: ValueTypeString.MustBeValue("bar"), + }, + want: ValueTypeString.MustBeValue("foo"), + }, + { + name: "empty", + target: &ValueAndDatasetValue{}, + want: nil, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Value()) + }) + } +} diff --git a/server/pkg/property/value_optional.go b/server/pkg/property/value_optional.go new file mode 100644 index 000000000..6b862ad64 --- /dev/null +++ b/server/pkg/property/value_optional.go @@ -0,0 +1,89 @@ +package property + +import "github.com/reearth/reearth-backend/pkg/value" + +type OptionalValue struct { + ov value.Optional +} + +func NewOptionalValue(t ValueType, v *Value) *OptionalValue { + var vv *value.Value + if v != nil { + vv = &v.v + } + ov := value.NewOptional(value.Type(t), vv) + if ov == nil { + return nil + } + return &OptionalValue{ov: *ov} +} + +func OptionalValueFrom(v *Value) *OptionalValue { + if v == nil { + return nil + } + ov := value.OptionalFrom(&v.v) + if ov == nil { + return nil + } + return &OptionalValue{ + ov: *ov, + } +} + +func (ov *OptionalValue) Type() ValueType { + if ov == nil { + return 
ValueTypeUnknown + } + return ValueType(ov.ov.Type()) +} + +func (ov *OptionalValue) Value() *Value { + if ov == nil { + return nil + } + vv := ov.ov.Value() + if vv == nil { + return nil + } + return &Value{v: *vv} +} + +func (ov *OptionalValue) TypeAndValue() (ValueType, *Value) { + return ov.Type(), ov.Value() +} + +func (ov *OptionalValue) Clone() *OptionalValue { + if ov == nil { + return nil + } + nov := ov.ov.Clone() + if nov == nil { + return nil + } + return &OptionalValue{ + ov: *nov, + } +} + +func (ov *OptionalValue) SetValue(v *Value) { + if ov == nil { + return + } + if v == nil { + ov.ov.SetValue(nil) + } else { + ov.ov.SetValue(&v.v) + } +} + +func (ov *OptionalValue) Cast(t ValueType) *OptionalValue { + if ov == nil { + return nil + } + vv := ov.ov.Cast(value.Type(t), types) + if vv == nil { + return nil + } + return &OptionalValue{ov: *vv} +} diff --git a/server/pkg/property/value_optional_test.go b/server/pkg/property/value_optional_test.go new file mode 100644 index 000000000..b25de3594 --- /dev/null +++ b/server/pkg/property/value_optional_test.go @@ -0,0 +1,361 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/value" + "github.com/stretchr/testify/assert" +) + +func TestNewNilableValue(t *testing.T) { + type args struct { + t ValueType + v *Value + } + + tests := []struct { + name string + args args + want *OptionalValue + }{ + { + name: "default type", + args: args{ + t: ValueTypeString, + v: ValueTypeString.ValueFrom("foo"), + }, + want: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("foo", types))}, + }, + { + name: "nil value", + args: args{ + t: ValueTypeString, + }, + want: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, + }, + { + name: "invalid value", + args: args{ + t: ValueTypeNumber, + v: ValueTypeString.ValueFrom("foo"), + }, + want: nil, + }, + { + name: "invalid type", + args: args{ + t: ValueTypeUnknown, + v: ValueTypeString.ValueFrom("foo"), + }, + want: 
nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, NewOptionalValue(tt.args.t, tt.args.v)) + }) + } +} + +func TestOptionalValueFrom(t *testing.T) { + type args struct { + v *Value + } + + tests := []struct { + name string + args args + want *OptionalValue + }{ + { + name: "default type", + args: args{ + v: ValueTypeString.ValueFrom("foo"), + }, + want: &OptionalValue{ov: *value.NewOptional(value.TypeString, value.TypeString.ValueFrom("foo", types))}, + }, + { + name: "empty value", + args: args{ + v: &Value{v: value.Value{}}, + }, + want: nil, + }, + { + name: "nil value", + args: args{}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, OptionalValueFrom(tt.args.v)) + }) + } +} + +func TestOptionalValue_Type(t *testing.T) { + tests := []struct { + name string + value *OptionalValue + want ValueType + }{ + { + name: "ok", + value: &OptionalValue{ov: *value.NewOptional(value.TypeBool, nil)}, + want: ValueTypeBool, + }, + { + name: "empty", + value: &OptionalValue{}, + want: ValueTypeUnknown, + }, + { + name: "nil", + value: nil, + want: ValueTypeUnknown, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Type()) + }) + } +} + +func TestOptionalValue_Value(t *testing.T) { + tests := []struct { + name string + value *OptionalValue + want *Value + }{ + { + name: "ok", + value: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("foobar", types))}, + want: ValueTypeString.ValueFrom("foobar"), + }, + { + name: "empty", + value: &OptionalValue{}, + want: nil, + }, + { + name: "nil", + value: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.value.Value() + assert.Equal(t, tt.want, res) + if res != nil { + 
assert.NotSame(t, tt.want, res) + } + }) + } +} + +func TestOptionalValue_TypeAndValue(t *testing.T) { + tests := []struct { + name string + value *OptionalValue + wantt ValueType + wantv *Value + }{ + { + name: "ok", + value: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("foobar", types))}, + wantt: ValueTypeString, + wantv: ValueTypeString.ValueFrom("foobar"), + }, + { + name: "empty", + value: &OptionalValue{}, + wantt: ValueTypeUnknown, + wantv: nil, + }, + { + name: "nil", + value: nil, + wantt: ValueTypeUnknown, + wantv: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ty, tv := tt.value.TypeAndValue() + assert.Equal(t, tt.wantt, ty) + assert.Equal(t, tt.wantv, tv) + if tv != nil { + assert.NotSame(t, tt.wantv, tv) + } + }) + } +} + +func TestOptionalValue_SetValue(t *testing.T) { + type args struct { + v *Value + } + + tests := []struct { + name string + value *OptionalValue + args args + invalid bool + }{ + { + name: "set", + value: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("foo", types))}, + args: args{v: ValueTypeString.ValueFrom("foobar")}, + }, + { + name: "set to nil", + value: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, + args: args{v: ValueTypeString.ValueFrom("foobar")}, + }, + { + name: "invalid value", + value: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, + args: args{v: ValueTypeNumber.ValueFrom(1)}, + invalid: true, + }, + { + name: "nil value", + args: args{v: ValueTypeNumber.ValueFrom(1)}, + }, + { + name: "empty", + value: &OptionalValue{}, + args: args{v: ValueTypeNumber.ValueFrom(1)}, + invalid: true, + }, + { + name: "nil", + args: args{v: ValueTypeNumber.ValueFrom(1)}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + var v *Value + if tt.value != nil { + v = tt.value.Value() + } + + tt.value.SetValue(tt.args.v) + + if tt.value != nil { + if 
tt.invalid { + assert.Equal(t, v, tt.value.Value()) + } else { + assert.Equal(t, tt.args.v, tt.value.Value()) + } + } + }) + } +} + +func TestOptionalValue_Clone(t *testing.T) { + tests := []struct { + name string + target *OptionalValue + }{ + { + name: "ok", + target: &OptionalValue{ + ov: *value.NewOptional(value.TypeString, value.TypeString.ValueFrom("foo", types)), + }, + }, + { + name: "empty", + target: &OptionalValue{}, + }, + { + name: "nil", + target: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.Clone() + assert.Equal(t, tt.target, res) + if tt.target != nil { + assert.NotSame(t, tt.target, res) + } + }) + } +} + +func TestOptionalValue_Cast(t *testing.T) { + type args struct { + t ValueType + } + + tests := []struct { + name string + target *OptionalValue + args args + want *OptionalValue + }{ + { + name: "diff type", + target: &OptionalValue{ov: *value.OptionalFrom(value.TypeNumber.ValueFrom(1.1, types))}, + args: args{t: ValueTypeString}, + want: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("1.1", types))}, + }, + { + name: "same type", + target: &OptionalValue{ov: *value.OptionalFrom(value.TypeNumber.ValueFrom(1.1, types))}, + args: args{t: ValueTypeNumber}, + want: &OptionalValue{ov: *value.OptionalFrom(value.TypeNumber.ValueFrom(1.1, types))}, + }, + { + name: "failed to cast", + target: &OptionalValue{ov: *value.OptionalFrom(value.TypeLatLng.ValueFrom(LatLng{Lat: 1, Lng: 2}, types))}, + args: args{t: ValueTypeString}, + want: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, + }, + { + name: "empty", + target: &OptionalValue{}, + args: args{t: ValueTypeString}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{t: ValueTypeString}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Cast(tt.args.t)) + }) + } +} diff 
--git a/server/pkg/property/value_test.go b/server/pkg/property/value_test.go new file mode 100644 index 000000000..8df347939 --- /dev/null +++ b/server/pkg/property/value_test.go @@ -0,0 +1,397 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/value" + "github.com/stretchr/testify/assert" +) + +func TestValueType_None(t *testing.T) { + tests := []struct { + name string + tr ValueType + want *OptionalValue + }{ + { + name: "default", + tr: ValueTypeString, + want: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, + }, + { + name: "unknown", + tr: ValueTypeUnknown, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.tr.None()) + }) + } +} + +func TestValue_IsEmpty(t *testing.T) { + tests := []struct { + name string + value *Value + want bool + }{ + { + name: "empty", + want: true, + }, + { + name: "nil", + want: true, + }, + { + name: "non-empty", + value: ValueTypeString.ValueFrom("foo"), + want: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.IsEmpty()) + }) + } +} + +func TestValue_Clone(t *testing.T) { + tests := []struct { + name string + value *Value + want *Value + }{ + { + name: "ok", + value: ValueTypeString.ValueFrom("foo"), + want: &Value{ + v: *value.TypeString.ValueFrom("foo", types), + }, + }, + { + name: "nil", + value: nil, + want: nil, + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Clone()) + }) + } +} + +func TestValue_Some(t *testing.T) { + tests := []struct { + name string + value *Value + want *OptionalValue + }{ + { + name: "ok", + value: &Value{ + v: *value.TypeString.ValueFrom("foo", types), + }, + 
want: &OptionalValue{ + ov: *value.OptionalFrom(value.TypeString.ValueFrom("foo", types)), + }, + }, + { + name: "nil", + value: nil, + want: nil, + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Some()) + }) + } +} + +func TestValue_Value(t *testing.T) { + tests := []struct { + name string + value *Value + want interface{} + }{ + { + name: "ok", + value: ValueTypeString.ValueFrom("foo"), + want: "foo", + }, + { + name: "empty", + value: &Value{}, + }, + { + name: "nil", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.want == nil { + assert.Nil(t, tt.value.Value()) + } else { + assert.Equal(t, tt.want, tt.value.Value()) + } + }) + } +} + +func TestValue_Type(t *testing.T) { + tests := []struct { + name string + value *Value + want ValueType + }{ + { + name: "ok", + value: ValueTypeString.ValueFrom("foo"), + want: ValueTypeString, + }, + { + name: "empty", + value: &Value{}, + want: ValueTypeUnknown, + }, + { + name: "nil", + want: ValueTypeUnknown, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Type()) + }) + } +} + +func TestValue_Interface(t *testing.T) { + tests := []struct { + name string + value *Value + want interface{} + }{ + { + name: "string", + value: ValueTypeString.ValueFrom("foo"), + want: "foo", + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + { + name: "nil", + value: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Interface()) + }) + } +} + +func TestValue_Cast(t *testing.T) { + type args struct { + t ValueType + } + + tests := []struct { + name string + target *Value + args args + want *Value + }{ + { + name: "diff type", + 
target: ValueTypeNumber.ValueFrom(1.1), + args: args{t: ValueTypeString}, + want: ValueTypeString.ValueFrom("1.1"), + }, + { + name: "same type", + target: ValueTypeNumber.ValueFrom(1.1), + args: args{t: ValueTypeNumber}, + want: ValueTypeNumber.ValueFrom(1.1), + }, + { + name: "failed to cast", + target: ValueTypeLatLng.ValueFrom(LatLng{Lat: 1, Lng: 2}), + args: args{t: ValueTypeString}, + want: nil, + }, + { + name: "invalid type", + target: ValueTypeNumber.ValueFrom(1.1), + args: args{t: ValueTypeUnknown}, + want: nil, + }, + { + name: "empty", + target: &Value{}, + args: args{t: ValueTypeString}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{t: ValueTypeString}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Cast(tt.args.t)) + }) + } +} + +func TestValueFromDataset(t *testing.T) { + tests := []struct { + Name string + Input *dataset.Value + Expected struct { + V *Value + Ok bool + } + }{ + { + Name: "latlng", + Input: dataset.ValueTypeLatLng.ValueFrom(dataset.LatLng{ + Lat: 10, + Lng: 12, + }), + Expected: struct { + V *Value + Ok bool + }{ + V: ValueTypeLatLng.ValueFrom(LatLng{ + Lat: 10, + Lng: 12, + }), + Ok: true, + }, + }, + { + Name: "LatLngHeight", + Input: dataset.ValueTypeLatLngHeight.ValueFrom(dataset.LatLngHeight{ + Lat: 10, + Lng: 12, + Height: 14, + }), + Expected: struct { + V *Value + Ok bool + }{ + V: ValueTypeLatLngHeight.ValueFrom(LatLngHeight{ + Lat: 10, + Lng: 12, + Height: 14, + }), + Ok: true, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected.V, valueFromDataset(tc.Input)) + }) + } +} + +func TestValueFromStringOrNumber(t *testing.T) { + type args struct { + s string + } + + tests := []struct { + name string + args args + want *Value + }{ + { + name: "string", + args: args{"aax"}, + want: ValueTypeString.ValueFrom("aax"), + }, + 
{ + name: "number positive int", + args: args{"1023"}, + want: ValueTypeNumber.ValueFrom(1023), + }, + { + name: "number negative int", + args: args{"-1"}, + want: ValueTypeNumber.ValueFrom(-1), + }, + { + name: "number float", + args: args{"1.14"}, + want: ValueTypeNumber.ValueFrom(1.14), + }, + { + name: "bool true", + args: args{"true"}, + want: ValueTypeBool.ValueFrom(true), + }, + { + name: "bool false", + args: args{"false"}, + want: ValueTypeBool.ValueFrom(false), + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, ValueFromStringOrNumber(tt.args.s)) + }) + } +} diff --git a/server/pkg/property/value_typography.go b/server/pkg/property/value_typography.go new file mode 100644 index 000000000..48f24987d --- /dev/null +++ b/server/pkg/property/value_typography.go @@ -0,0 +1,133 @@ +package property + +import ( + "github.com/mitchellh/mapstructure" +) + +var ValueTypeTypography = ValueType("typography") + +type Typography struct { + FontFamily *string `json:"fontFamily" mapstructure:"fontFamily"` + FontWeight *string `json:"fontWeight" mapstructure:"fontWeight"` + FontSize *int `json:"fontSize" mapstructure:"fontSize"` + Color *string `json:"color" mapstructure:"color"` + TextAlign *TextAlign `json:"textAlign" mapstructure:"textAlign"` + Bold *bool `json:"bold" mapstructure:"bold"` + Italic *bool `json:"italic" mapstructure:"italic"` + Underline *bool `json:"underline" mapstructure:"underline"` +} + +func (t *Typography) Clone() *Typography { + if t == nil { + return nil + } + return &Typography{ + FontFamily: t.FontFamily, + FontWeight: t.FontWeight, + FontSize: t.FontSize, + Color: t.Color, + TextAlign: t.TextAlign, + Bold: t.Bold, + Italic: t.Italic, + Underline: t.Underline, + } +} + +type TextAlign string + +const ( + TextAlignLeft TextAlign = "left" + TextAlignCenter TextAlign = "center" + TextAlignRight TextAlign = "right" + TextAlignJustify TextAlign = "justify" + 
TextAlignJustifyAll TextAlign = "justify_all" +) + +func TextAlignFrom(t string) (TextAlign, bool) { + switch TextAlign(t) { + case TextAlignLeft: + return TextAlignLeft, true + case TextAlignCenter: + return TextAlignCenter, true + case TextAlignRight: + return TextAlignRight, true + case TextAlignJustify: + return TextAlignJustify, true + case TextAlignJustifyAll: + return TextAlignJustifyAll, true + } + return TextAlign(""), false +} + +func TextAlignFromRef(t *string) *TextAlign { + if t == nil { + return nil + } + var t2 TextAlign + switch TextAlign(*t) { + case TextAlignLeft: + t2 = TextAlignLeft + case TextAlignCenter: + t2 = TextAlignCenter + case TextAlignRight: + t2 = TextAlignRight + case TextAlignJustify: + t2 = TextAlignJustify + case TextAlignJustifyAll: + t2 = TextAlignJustifyAll + default: + return nil + } + return &t2 +} + +func (t TextAlign) String() string { + return string(t) +} + +func (t *TextAlign) StringRef() *string { + if t == nil { + return nil + } + t2 := string(*t) + return &t2 +} + +type typePropertyTypography struct{} + +func (*typePropertyTypography) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(Typography); ok { + return v, true + } + + if v, ok := i.(*Typography); ok { + if v != nil { + return *v, true + } + return nil, false + } + + v := Typography{} + if err := mapstructure.Decode(i, &v); err == nil { + return v, true + } + + return nil, false +} + +func (*typePropertyTypography) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*typePropertyTypography) Validate(i interface{}) bool { + _, ok := i.(Typography) + return ok +} + +func (v *Value) ValueTypography() (vv Typography, ok bool) { + if v == nil { + return + } + vv, ok = v.Value().(Typography) + return +} diff --git a/server/pkg/property/value_typography_test.go b/server/pkg/property/value_typography_test.go new file mode 100644 index 000000000..31f149ae4 --- /dev/null +++ b/server/pkg/property/value_typography_test.go @@ -0,0 +1,205 @@ 
+package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func getStrRef(i string) *string { + return &i +} + +func getBoolRef(i bool) *bool { + return &i +} + +func TestTypography_Clone(t *testing.T) { + i := 10 + + testes := []struct { + Name string + Typography, Expected *Typography + }{ + { + Name: "nil typography", + }, + { + Name: "cloned", + Typography: &Typography{ + FontFamily: getStrRef("x"), + FontWeight: getStrRef("b"), + FontSize: &i, + Color: getStrRef("red"), + TextAlign: TextAlignFromRef(getStrRef(TextAlignCenter.String())), + Bold: getBoolRef(true), + Italic: getBoolRef(false), + Underline: getBoolRef(true), + }, + Expected: &Typography{ + FontFamily: getStrRef("x"), + FontWeight: getStrRef("b"), + FontSize: &i, + Color: getStrRef("red"), + TextAlign: TextAlignFromRef(getStrRef("center")), + Bold: getBoolRef(true), + Italic: getBoolRef(false), + Underline: getBoolRef(true), + }, + }, + } + + for _, tc := range testes { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.Typography.Clone() + assert.Equal(t, tc.Expected, res) + if tc.Expected != nil { + assert.NotSame(t, tc.Expected, res) + } + }) + } +} + +func TestTextAlignFrom(t *testing.T) { + tests := []struct { + Name string + Expected struct { + TA TextAlign + Bool bool + } + }{ + { + Name: "left", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlignLeft, + Bool: true, + }, + }, + { + Name: "right", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlignRight, + Bool: true, + }, + }, + { + Name: "center", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlignCenter, + Bool: true, + }, + }, + { + Name: "justify", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlignJustify, + Bool: true, + }, + }, + { + Name: "justify_all", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlignJustifyAll, + Bool: true, + }, + }, + { + Name: "undefined", + Expected: struct { + TA 
TextAlign + Bool bool + }{ + TA: TextAlign(""), + Bool: false, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res, ok := TextAlignFrom(tc.Name) + assert.Equal(t, tc.Expected.TA, res) + assert.Equal(t, tc.Expected.Bool, ok) + }) + } +} + +func TestTextAlignFromRef(t *testing.T) { + ja := TextAlignJustifyAll + j := TextAlignJustify + c := TextAlignCenter + l := TextAlignLeft + r := TextAlignRight + + tests := []struct { + Name string + Input *string + Expected *TextAlign + }{ + { + Name: "left", + Input: getStrRef("left"), + Expected: &l, + }, + { + Name: "right", + Input: getStrRef("right"), + Expected: &r, + }, + { + Name: "center", + Input: getStrRef("center"), + Expected: &c, + }, + { + Name: "justify", + Input: getStrRef("justify"), + Expected: &j, + }, + { + Name: "justify_all", + Input: getStrRef("justify_all"), + Expected: &ja, + }, + { + Name: "undefined", + Input: getStrRef("undefined"), + }, + { + Name: "nil input", + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := TextAlignFromRef(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestTextAlign_StringRef(t *testing.T) { + var ta *TextAlign + assert.Nil(t, ta.StringRef()) +} diff --git a/server/pkg/rerror/error.go b/server/pkg/rerror/error.go new file mode 100644 index 000000000..a9ca38a7b --- /dev/null +++ b/server/pkg/rerror/error.go @@ -0,0 +1,145 @@ +package rerror + +import ( + "fmt" + "runtime/debug" + + "github.com/pkg/errors" + "github.com/reearth/reearth-backend/pkg/log" +) + +var ( + errInternal = errors.New("internal") + // ErrNotFound indicates something was not found. + ErrNotFound = errors.New("not found") + // ErrInvalidParams represents the params are invalid, such as empty string. + ErrInvalidParams = errors.New("invalid params") + // ErrNotImplemented indicates unimplemented. 
+	ErrNotImplemented = errors.New("not implemented")
+)
+
+func ErrInternalBy(err error) error {
+	return errInternalBy(errInternal, err)
+}
+
+func ErrInternalByWith(label string, err error) error {
+	return errInternalBy(errors.New(label), err)
+}
+
+func ErrInternalByWithError(label, err error) error {
+	return errInternalBy(label, err)
+}
+
+func errInternalBy(label, err error) *Error {
+	log.Errorf("%s: %s", label.Error(), err.Error())
+	debug.PrintStack()
+	return &Error{
+		Label:  label,
+		Err:    err,
+		Hidden: true,
+	}
+}
+
+func UnwrapErrInternal(err error) error {
+	return As(err, errInternal)
+}
+
+// Error can hold an error together with label.
+// This is useful for displaying a hierarchical error message cleanly and searching by label later to retrieve a wrapped error.
+// Currently, Go standard error library does not support these use cases. That's why we need our own error type.
+type Error struct {
+	Label    error
+	Err      error
+	Hidden   bool
+	Separate bool
+}
+
+// From creates an Error with string label.
+func From(label string, err error) *Error {
+	return &Error{Label: errors.New(label), Err: err}
+}
+
+// FromSep creates an Error with string label, but separated from wrapped error message when the error is printed.
+func FromSep(label string, err error) *Error {
+	return &Error{Label: errors.New(label), Err: err, Separate: true}
+}
+
+// Error implements error interface.
+func (e *Error) Error() string {
+	if e == nil {
+		return ""
+	}
+	if e.Hidden {
+		return e.Label.Error()
+	}
+	if !e.Separate {
+		if e2, ok := e.Err.(*Error); ok {
+			return fmt.Sprintf("%s.%s", e.Label, e2)
+		}
+	}
+	return fmt.Sprintf("%s: %s", e.Label, e.Err)
+}
+
+// Unwrap implements the interface for errors.Unwrap.
+func (e *Error) Unwrap() error { + if e == nil { + return nil + } + return e.Err +} + +// Get gets Error struct from an error +func Get(err error) *Error { + var target *Error + _ = errors.As(err, &target) + return target +} + +// Is looks up errors whose label is the same as the specific label and return true if it was found +func Is(err error, label error) bool { + if err == nil { + return false + } + e := err + var target *Error + for { + if !errors.As(e, &target) { + break + } + if target.Label == label { + return true + } + e = target.Unwrap() + } + return false +} + +// As looks up errors whose label is the same as the specific label and return a wrapped error. +func As(err error, label error) error { + if err == nil { + return nil + } + e := err + for { + target := Get(e) + if target == nil { + break + } + if target.Label == label { + return target.Unwrap() + } + e = target.Unwrap() + } + return nil +} + +// With returns a new constructor to generate an Error with specific label. 
+func With(label error) func(error) *Error { + return func(err error) *Error { + return &Error{ + Label: label, + Err: err, + Separate: true, + } + } +} diff --git a/server/pkg/rerror/error_test.go b/server/pkg/rerror/error_test.go new file mode 100644 index 000000000..ef6846007 --- /dev/null +++ b/server/pkg/rerror/error_test.go @@ -0,0 +1,170 @@ +package rerror + +import ( + "fmt" + "testing" + + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" +) + +func TestErrInternal(t *testing.T) { + werr := errors.New("wrapped") + err := ErrInternalBy(werr) + assert.EqualError(t, err, "internal") + assert.IsType(t, err, &Error{}) + assert.Same(t, werr, errors.Unwrap(err)) + + err2 := ErrInternalByWith("a", werr) + assert.EqualError(t, err2, "a") + assert.IsType(t, err2, &Error{}) + assert.Same(t, werr, errors.Unwrap(err2)) + + err3 := ErrInternalByWithError(errors.New("x"), werr) + assert.EqualError(t, err3, "x") + assert.IsType(t, err3, &Error{}) + assert.Same(t, werr, errors.Unwrap(err3)) +} + +func TestError(t *testing.T) { + werr := errors.New("wrapped") + err := &Error{Label: errors.New("label"), Err: werr} + + assert.EqualError(t, err, "label: wrapped") + assert.Same(t, werr, errors.Unwrap(err)) + + label2 := errors.New("foo") + err3 := &Error{Label: label2, Err: err} + assert.EqualError(t, err3, "foo.label: wrapped") + + err4 := &Error{Label: errors.New("bar"), Err: err3} + assert.EqualError(t, err4, "bar.foo.label: wrapped") + + err5 := &Error{ + Label: errors.New("label"), + Err: werr, + Hidden: true, + } + assert.EqualError(t, err5, "label") + + var nilerr *Error + assert.EqualError(t, nilerr, "") + assert.Nil(t, nilerr.Unwrap()) + + err6 := &Error{Label: errors.New("d"), Err: &Error{Label: errors.New("e"), Err: &Error{Label: errors.New("f"), Err: errors.New("g")}}, Separate: true} + assert.EqualError(t, err6, "d: e.f: g") +} + +func TestUnwrapErrInternal(t *testing.T) { + err := errors.New("err") + assert.Same(t, err, 
UnwrapErrInternal(ErrInternalBy(err))) + assert.Nil(t, UnwrapErrInternal(err)) +} + +func TestFrom(t *testing.T) { + werr := errors.New("wrapped") + err := From("label", werr) + assert.Equal(t, "label", err.Label.Error()) + assert.Same(t, werr, err.Err) + assert.False(t, err.Hidden) +} + +func TestFromSep(t *testing.T) { + werr := &Error{Label: errors.New("wrapped"), Err: errors.New("wrapped2")} + err := FromSep("label", werr) + assert.EqualError(t, err.Label, "label") + assert.Same(t, werr, err.Err) + assert.False(t, err.Hidden) + assert.True(t, err.Separate) +} + +func TestGet(t *testing.T) { + werr := &Error{Label: errors.New("hoge"), Err: errors.New("wrapped")} + err := fmt.Errorf("wrapped: %w", werr) + assert.Same(t, werr, Get(err)) + assert.Same(t, werr, Get(werr)) +} + +func TestIs(t *testing.T) { + werr := errors.New("wrapped") + label := errors.New("label") + err := &Error{ + Label: label, + Err: werr, + } + assert.True(t, Is(err, label)) + assert.False(t, Is(err, errors.New("label"))) + assert.False(t, Is(err, errors.New("nested"))) + assert.False(t, Is(err, errors.New("wrapped"))) + + label2 := errors.New("nested") + err = &Error{ + Label: label2, + Err: &Error{ + Label: label, + Err: werr, + }, + } + assert.True(t, Is(err, label)) + assert.True(t, Is(err, label2)) + assert.False(t, Is(err, errors.New("label"))) + assert.False(t, Is(err, errors.New("nested"))) + assert.False(t, Is(err, errors.New("wrapped"))) + assert.False(t, Is(nil, errors.New("label"))) +} + +func TestAs(t *testing.T) { + werr := errors.New("wrapped") + label := errors.New("label") + err := &Error{ + Label: label, + Err: werr, + } + assert.Same(t, werr, As(err, label)) + assert.Nil(t, As(err, errors.New("label"))) + assert.Nil(t, As(err, errors.New("nested"))) + assert.Nil(t, As(err, errors.New("wrapped"))) + + label2 := errors.New("nested") + err = &Error{ + Label: label2, + Err: &Error{ + Label: label, + Err: werr, + }, + } + assert.Same(t, werr, As(err, label)) + assert.Same(t, 
err.Err, As(err, label2)) + assert.Nil(t, As(err, errors.New("label"))) + assert.Nil(t, As(err, errors.New("nested"))) + assert.Nil(t, As(err, errors.New("wrapped"))) + + assert.Nil(t, As(nil, errors.New("label"))) + assert.Nil(t, As(errors.New("foo"), errors.New("bar"))) + assert.Nil(t, As(&Error{ + Label: errors.New("bar"), + Err: errors.New("foo"), + }, errors.New("bar"))) +} + +func TestWith(t *testing.T) { + werr := errors.New("wrapped") + label := errors.New("label") + err := With(label)(werr) + assert.Equal(t, &Error{ + Label: label, + Err: werr, + Separate: true, + }, err) + assert.Same(t, label, err.Label) + assert.Same(t, werr, err.Err) + + err = With(label)(nil) + assert.Equal(t, &Error{ + Label: label, + Err: nil, + Separate: true, + }, err) + assert.Same(t, label, err.Label) + assert.Nil(t, err.Err) +} diff --git a/server/pkg/scene/builder.go b/server/pkg/scene/builder.go new file mode 100644 index 000000000..074ca5dd0 --- /dev/null +++ b/server/pkg/scene/builder.go @@ -0,0 +1,93 @@ +package scene + +import ( + "time" +) + +type Builder struct { + scene *Scene +} + +func New() *Builder { + return &Builder{scene: &Scene{}} +} + +func (b *Builder) Build() (*Scene, error) { + if b.scene.id.IsNil() { + return nil, ErrInvalidID + } + if b.scene.team.IsNil() { + return nil, ErrInvalidID + } + if b.scene.rootLayer.IsNil() { + return nil, ErrInvalidID + } + if b.scene.widgets == nil { + b.scene.widgets = NewWidgets(nil, nil) + } + if b.scene.plugins == nil { + b.scene.plugins = NewPlugins(nil) + } + if b.scene.updatedAt.IsZero() { + b.scene.updatedAt = b.scene.CreatedAt() + } + return b.scene, nil +} + +func (b *Builder) MustBuild() *Scene { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +func (b *Builder) ID(id ID) *Builder { + b.scene.id = id + return b +} + +func (b *Builder) NewID() *Builder { + b.scene.id = NewID() + return b +} + +func (b *Builder) Project(prj ProjectID) *Builder { + b.scene.project = prj + return b +} + +func 
(b *Builder) Team(team TeamID) *Builder { + b.scene.team = team + return b +} + +func (b *Builder) UpdatedAt(updatedAt time.Time) *Builder { + b.scene.updatedAt = updatedAt + return b +} + +func (b *Builder) Widgets(widgets *Widgets) *Builder { + b.scene.widgets = widgets + return b +} + +func (b *Builder) RootLayer(rootLayer LayerID) *Builder { + b.scene.rootLayer = rootLayer + return b +} + +func (b *Builder) Plugins(plugins *Plugins) *Builder { + b.scene.plugins = plugins + return b +} + +func (b *Builder) Property(p PropertyID) *Builder { + b.scene.property = p + return b +} + +func (b *Builder) Clusters(cl *ClusterList) *Builder { + b.scene.clusters = cl + return b +} diff --git a/server/pkg/scene/builder/builder.go b/server/pkg/scene/builder/builder.go new file mode 100644 index 000000000..b3123c572 --- /dev/null +++ b/server/pkg/scene/builder/builder.go @@ -0,0 +1,81 @@ +package builder + +import ( + "context" + "encoding/json" + "io" + "time" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/encoding" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/tag" +) + +const ( + // schema version + version = 1 +) + +type Builder struct { + ploader property.Loader + tloader tag.SceneLoader + exporter *encoding.Exporter + encoder *encoder +} + +func New(ll layer.Loader, pl property.Loader, dl dataset.GraphLoader, tl tag.Loader, tsl tag.SceneLoader) *Builder { + e := &encoder{} + return &Builder{ + ploader: pl, + tloader: tsl, + encoder: e, + exporter: &encoding.Exporter{ + Merger: &merging.Merger{ + LayerLoader: ll, + PropertyLoader: pl, + }, + Sealer: &merging.Sealer{ + DatasetGraphLoader: dl, + TagLoader: tl, + }, + Encoder: e, + }, + } +} + +func (b *Builder) BuildScene(ctx context.Context, w io.Writer, s *scene.Scene, 
publishedAt time.Time) error { + if b == nil { + return nil + } + + res, err := b.buildScene(ctx, s, publishedAt) + if err != nil { + return err + } + + return json.NewEncoder(w).Encode(res) +} + +func (b *Builder) buildScene(ctx context.Context, s *scene.Scene, publishedAt time.Time) (*sceneJSON, error) { + if b == nil { + return nil, nil + } + + // properties + p, err := b.ploader(ctx, s.Properties()...) + if err != nil { + return nil, err + } + + // layers + if err := b.exporter.ExportLayerByID(ctx, s.RootLayer()); err != nil { + return nil, err + } + layers := b.encoder.Result() + + return b.scene(ctx, s, publishedAt, layers, p) +} diff --git a/server/pkg/scene/builder/builder_test.go b/server/pkg/scene/builder/builder_test.go new file mode 100644 index 000000000..8a0ad664f --- /dev/null +++ b/server/pkg/scene/builder/builder_test.go @@ -0,0 +1,773 @@ +package builder + +import ( + "context" + "testing" + "time" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/tag" + "github.com/stretchr/testify/assert" +) + +func TestSceneBuilder(t *testing.T) { + publishedAt := time.Date(2019, time.August, 15, 0, 0, 0, 0, time.Local) + + // ids + sceneID := scene.NewID() + scenePropertyID := property.NewID() + propertySchemaID := property.MustSchemaID("hoge~0.1.0/foobar") + pluginID := layer.MustPluginID("hoge~0.1.0") + pluginExtension1ID := layer.PluginExtensionID("ext") + pluginExtension2ID := layer.PluginExtensionID("ext2") + propertySchemaField1ID := property.FieldID("a") + propertySchemaField2ID := property.FieldID("b") + propertySchemaField3ID := property.FieldID("c") + propertySchemaGroup1ID := property.SchemaGroupID("A") + propertySchemaGroup2ID := property.SchemaGroupID("B") + propertyItemID1 := property.NewItemID() + propertyItemID2 := property.NewItemID() + + // datasets + 
dss1id := dataset.NewSchemaID() + dss2id := dataset.NewSchemaID() + dss3id := dataset.NewSchemaID() + ds1id := dataset.NewID() + ds2id := dataset.NewID() + ds3id := dataset.NewID() + ds1f1 := dataset.NewFieldID() + ds1f2 := dataset.NewFieldID() + ds2f1 := dataset.NewFieldID() + ds3f1 := dataset.NewFieldID() + ds1 := dataset.New().ID(ds1id).Fields([]*dataset.Field{ + dataset.NewField( + ds1f1, + dataset.ValueTypeRef.ValueFrom(ds2id), + "ds1f1", + ), + dataset.NewField( + ds1f2, + dataset.ValueTypeString.ValueFrom("a"), + "ds1f2", + ), + }).Scene(sceneID).Schema(dss1id).Source("ds1").MustBuild() + ds2 := dataset.New().ID(ds2id).Fields([]*dataset.Field{ + dataset.NewField( + ds2f1, + dataset.ValueTypeRef.ValueFrom(ds3id), + "ds2", + ), + }).Scene(sceneID).Schema(dss2id).Source("ds2").MustBuild() + ds3 := dataset.New().ID(ds3id).Fields([]*dataset.Field{ + dataset.NewField( + ds3f1, + dataset.ValueTypeString.ValueFrom("b"), + "ds3", + ), + }).Scene(sceneID).Schema(dss3id).Source("ds3").MustBuild() + + // tags + tag1 := tag.NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() + tag2 := tag.NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() + tag3 := tag.NewItem().NewID().Label("unused").Scene(sceneID).MustBuild() + tag4 := tag.NewGroup().NewID().Label("bar").Scene(sceneID).Tags(tag.IDList{ + tag1.ID(), tag2.ID(), tag3.ID(), + }).MustBuild() + tag5 := tag.NewItem().NewID().Label("dummy").Scene(scene.NewID()).MustBuild() // dummy + tags := tag.List{tag1, tag2, tag3, tag4, tag5} + + // layer tags + ltag1 := layer.NewTagItem(tag1.ID()) + ltag2 := layer.NewTagItem(tag2.ID()) + ltag3 := layer.NewTagGroup(tag4.ID(), []*layer.TagItem{ltag2}) + + // layer1: normal layer item + layer1p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID). + Fields([]*property.Field{ + property.NewField(propertySchemaField1ID). 
+ Value(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("xxx"))). + MustBuild(), + property.NewField(propertySchemaField2ID). + Value(property.OptionalValueFrom(property.ValueTypeNumber.ValueFrom(1))). + MustBuild(), + }).MustBuild(), + }). + MustBuild() + layer1 := layer.NewItem(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer1p.IDRef()). + Tags(layer.NewTagList([]layer.Tag{ltag1, ltag3})). + MustBuild() + + // layer2: normal layer group + layer21p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID). + Fields([]*property.Field{ + property.NewField(propertySchemaField1ID). + Value(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("yyy"))). + MustBuild(), + property.NewField(propertySchemaField2ID). + Value(property.OptionalValueFrom(property.ValueTypeNumber.ValueFrom(1))). + MustBuild(), + }).MustBuild(), + }). + MustBuild() + layer21 := layer.NewItem(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer21p.IDRef()). + Tags(layer.NewTagList([]layer.Tag{ltag2})). + MustBuild() + layer2p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID). + Fields([]*property.Field{ + property.NewField(propertySchemaField1ID). + Value(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("xxx"))). + MustBuild(), + property.NewField(propertySchemaField3ID). + Value(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("test"))). + MustBuild(), + }).MustBuild(), + }). + MustBuild() + layer2ibf1 := layer.NewInfoboxField(). + NewID(). + Plugin(pluginID). + Extension(pluginExtension1ID). + Property(layer2p.ID()). 
+ MustBuild() + layer2ib := layer.NewInfobox([]*layer.InfoboxField{ + layer2ibf1, + }, scenePropertyID) + layer2 := layer.NewGroup(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer2p.IDRef()). + Infobox(layer2ib). + Layers(layer.NewIDList([]layer.ID{layer21.ID()})). + Tags(layer.NewTagList([]layer.Tag{ltag1, ltag3})). + MustBuild() + + // layer3: full-linked layer item with infobox + layer3p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID). + Fields([]*property.Field{ + property.NewField(propertySchemaField1ID). + Value(property.NewOptionalValue(property.ValueTypeString, nil)). + Links(property.NewLinks([]*property.Link{ + property.NewLink(ds2id, dss2id, ds2f1), + property.NewLink(ds3id, dss3id, ds3f1), + })). + MustBuild(), + }).MustBuild(), + }). + MustBuild() + layer3ibf1 := layer.NewInfoboxField(). + NewID(). + Plugin(pluginID). + Extension(pluginExtension1ID). + Property(scenePropertyID). + MustBuild() + layer3ib := layer.NewInfobox([]*layer.InfoboxField{ + layer3ibf1, + }, scenePropertyID) + layer3 := layer.NewItem(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer3p.IDRef()). + Infobox(layer3ib). + MustBuild() + + // layer4: linked layer group with infobox and children + layer41p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID). + Fields([]*property.Field{ + property.NewField(propertySchemaField2ID). + Value(property.OptionalValueFrom(property.ValueTypeNumber.ValueFrom(1))). + MustBuild(), + }).MustBuild(), + }). 
+ MustBuild() + layer41ibf1 := layer.NewInfoboxField().NewID().Plugin(pluginID).Extension(pluginExtension1ID).Property(layer41p.ID()).MustBuild() + layer41ib := layer.NewInfobox([]*layer.InfoboxField{ + layer41ibf1, + }, layer41p.ID()) + layer41 := layer.NewItem(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer41p.IDRef()). + Infobox(layer41ib). + LinkedDataset(&ds3id). + MustBuild() + layer4p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID). + Fields([]*property.Field{ + property.NewField(propertySchemaField1ID). + Value(property.NewOptionalValue(property.ValueTypeString, nil)). + Links(property.NewLinks([]*property.Link{ + property.NewLinkFieldOnly(dss3id, ds3f1), + })). + MustBuild(), + property.NewField(propertySchemaField3ID). + Value(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("xxx"))). + MustBuild(), + }).MustBuild(), + }). + MustBuild() + layer4ibf1 := layer.NewInfoboxField(). + NewID(). + Plugin(pluginID). + Extension(pluginExtension1ID). + Property(layer4p.ID()). + MustBuild() + layer4ib := layer.NewInfobox([]*layer.InfoboxField{ + layer4ibf1, + }, scenePropertyID) + layer4 := layer.NewGroup(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer4p.IDRef()). + Infobox(layer4ib). + LinkedDatasetSchema(&dss3id). + Layers(layer.NewIDList([]layer.ID{layer41.ID()})). + MustBuild() + + // layer5: linked layer group and children with overrided property + layer51p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID). + Fields([]*property.Field{ + property.NewField(propertySchemaField1ID). + Value(property.NewOptionalValue(property.ValueTypeString, nil)). 
+ Links(property.NewLinks([]*property.Link{ + property.NewLinkFieldOnly(dss1id, ds1f2), + })). + MustBuild(), + }).MustBuild(), + }). + MustBuild() + layer51 := layer.NewItem(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer51p.IDRef()). + LinkedDataset(&ds1id). + MustBuild() + layer5p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID). + Fields([]*property.Field{ + property.NewField(propertySchemaField1ID). + Value(property.NewOptionalValue(property.ValueTypeString, nil)). + Links(property.NewLinks([]*property.Link{ + property.NewLinkFieldOnly(dss1id, ds1f1), + property.NewLinkFieldOnly(dss2id, ds2f1), + property.NewLinkFieldOnly(dss3id, ds3f1), + })). + MustBuild(), + property.NewField(propertySchemaField2ID). + Value(property.NewOptionalValue(property.ValueTypeString, nil)). + Links(property.NewLinks([]*property.Link{ + property.NewLinkFieldOnly(dss1id, ds1f1), + property.NewLinkFieldOnly(dss2id, ds2f1), + property.NewLinkFieldOnly(dss3id, ds3f1), + })). + MustBuild(), + }).MustBuild(), + }). + MustBuild() + layer5 := layer.NewGroup(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer5p.IDRef()). + LinkedDatasetSchema(&dss1id). + Layers(layer.NewIDList([]layer.ID{layer51.ID()})). + MustBuild() + layer6p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroupList().NewID().SchemaGroup(propertySchemaGroup2ID).Groups([]*property.Group{ + property.NewGroup().ID(propertyItemID1).SchemaGroup(propertySchemaGroup2ID). + Fields([]*property.Field{ + property.NewField(propertySchemaField1ID). + Value(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("XYZ"))). + MustBuild(), + }).MustBuild(), + property.NewGroup().ID(propertyItemID2).SchemaGroup(propertySchemaGroup2ID). 
+ Fields([]*property.Field{ + property.NewField(propertySchemaField1ID). + Value(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("ZYX"))). + MustBuild(), + }).MustBuild(), + }).MustBuild(), + }). + MustBuild() + layer6 := layer.NewItem(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer6p.IDRef()). + MustBuild() + + // root layer + rootLayer := layer.NewGroup().NewID().Scene(sceneID).Layers(layer.NewIDList([]layer.ID{ + layer1.ID(), + layer2.ID(), + layer3.ID(), + layer4.ID(), + layer5.ID(), + layer6.ID(), + })).MustBuild() + + // scene + scenep := property.New(). + ID(scenePropertyID). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID).Fields([]*property.Field{ + property.NewField(propertySchemaField1ID). + Value(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("hogehoge"))). + MustBuild(), + }).MustBuild(), + }). + MustBuild() + + sceneWidgetID1 := scene.NewWidgetID() + sceneWidgetID2 := scene.NewWidgetID() + sceneWidget1 := scene.MustWidget( + sceneWidgetID1, + pluginID, + pluginExtension1ID, + scenePropertyID, + false, + false) + sceneWidget2 := scene.MustWidget( + sceneWidgetID2, + pluginID, + pluginExtension2ID, + scenePropertyID, + true, + true) + scenePlugin1 := scene.NewPlugin(pluginID, &scenePropertyID) + + assert.Equal(t, sceneWidgetID1, sceneWidget1.ID()) + assert.Equal(t, sceneWidgetID2, sceneWidget2.ID()) + + scene := scene.New(). + ID(sceneID). + Project(scene.NewProjectID()). + Team(scene.NewTeamID()). + Property(scenep.ID()). + Widgets(scene.NewWidgets([]*scene.Widget{ + sceneWidget1, sceneWidget2, + }, nil)). + Plugins(scene.NewPlugins([]*scene.Plugin{scenePlugin1})). + RootLayer(rootLayer.ID()). 
+ MustBuild() + + // loaders + dloader := dataset.List{ + ds1, ds2, ds3, + }.GraphLoader() + lloader := layer.LoaderFrom([]layer.Layer{ + rootLayer, + layer1, + layer2, + layer21, + layer3, + layer4, + layer41, + layer5, + layer51, + layer6, + }) + ploader := property.LoaderFrom([]*property.Property{ + scenep, + layer1p, + layer2p, + layer21p, + layer3p, + layer4p, + layer41p, + layer5p, + layer51p, + layer6p, + }) + tloader := tag.LoaderFrom(tags) + tsloader := tag.SceneLoaderFrom(tags) + + expectedLayer1 := &layerJSON{ + ID: layer1.ID().String(), + PluginID: layer1.Plugin().StringRef(), + ExtensionID: layer1.Extension().StringRef(), + Name: layer1.Name(), + IsVisible: true, + PropertyID: layer1.Property().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "xxx", + "b": float64(1), + }, + }, + Infobox: nil, + Tags: []tagJSON{ + {ID: tag1.ID().String(), Label: tag1.Label(), Tags: nil}, + {ID: tag4.ID().String(), Label: tag4.Label(), Tags: []tagJSON{ + {ID: tag2.ID().String(), Label: tag2.Label(), Tags: nil}, + }}, + }, + Children: nil, + } + + expectedLayer2 := &layerJSON{ + ID: layer2.ID().String(), + PluginID: layer2.Plugin().StringRef(), + ExtensionID: layer2.Extension().StringRef(), + Name: layer2.Name(), + IsVisible: true, + PropertyID: layer2.Property().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "xxx", + "c": "test", + }, + }, + Infobox: &infoboxJSON{ + Fields: []infoboxFieldJSON{ + { + ID: layer2ibf1.ID().String(), + PluginID: layer2ibf1.Plugin().String(), + ExtensionID: layer2ibf1.Extension().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "xxx", + "c": "test", + }, + }, + }, + }, + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, + }, + Tags: []tagJSON{ + {ID: tag1.ID().String(), Label: tag1.Label()}, + {ID: tag4.ID().String(), Label: tag4.Label(), Tags: []tagJSON{ + {ID: tag2.ID().String(), Label: 
tag2.Label()}, + }}, + }, + Children: []*layerJSON{ + { + ID: layer21.ID().String(), + PluginID: layer21.Plugin().StringRef(), + ExtensionID: layer21.Extension().StringRef(), + Name: layer21.Name(), + IsVisible: true, + PropertyID: layer21.Property().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "yyy", + "b": float64(1), + "c": "test", + }, + }, + Infobox: &infoboxJSON{ + Fields: []infoboxFieldJSON{ + { + ID: layer2ibf1.ID().String(), + PluginID: layer2ibf1.Plugin().String(), + ExtensionID: layer2ibf1.Extension().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "xxx", + "c": "test", + }, + }, + }, + }, + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, + }, + Tags: []tagJSON{ + {ID: tag2.ID().String(), Label: tag2.Label()}, + }, + }, + }, + } + + expectedLayer3 := &layerJSON{ + ID: layer3.ID().String(), + PluginID: layer3.Plugin().StringRef(), + ExtensionID: layer3.Extension().StringRef(), + Name: layer3.Name(), + IsVisible: true, + PropertyID: layer3.Property().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "b", + }, + }, + Infobox: &infoboxJSON{ + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, + Fields: []infoboxFieldJSON{ + { + ID: layer3ibf1.ID().String(), + PluginID: layer3ibf1.Plugin().String(), + ExtensionID: layer3ibf1.Extension().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, + }, + }, + }, + } + + expectedLayer4 := &layerJSON{ + ID: layer4.ID().String(), + PluginID: layer4.Plugin().StringRef(), + ExtensionID: layer4.Extension().StringRef(), + Name: layer4.Name(), + IsVisible: true, + PropertyID: layer4.Property().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": nil, + "c": "xxx", + }, + }, + Infobox: &infoboxJSON{ + Property: map[string]interface{}{ + "A": 
map[string]interface{}{ + "a": "hogehoge", + }, + }, + Fields: []infoboxFieldJSON{ + { + ID: layer4ibf1.ID().String(), + PluginID: layer4ibf1.Plugin().String(), + ExtensionID: layer4ibf1.Extension().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": nil, + "c": "xxx", + }, + }, + }, + }, + }, + Children: []*layerJSON{ + { + ID: layer41.ID().String(), + PluginID: layer41.Plugin().StringRef(), + ExtensionID: layer41.Extension().StringRef(), + IsVisible: true, + PropertyID: layer41.Property().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "b", + "b": float64(1), + "c": "xxx", + }, + }, + Infobox: &infoboxJSON{ + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + "b": float64(1), + }, + }, + Fields: []infoboxFieldJSON{ + { + ID: layer41ibf1.ID().String(), + PluginID: layer41ibf1.Plugin().String(), + ExtensionID: layer41ibf1.Extension().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "b": float64(1), + }, + }, + }, + }, + }, + }, + }, + } + + expectedLayer5 := &layerJSON{ + ID: layer5.ID().String(), + PluginID: layer5.Plugin().StringRef(), + ExtensionID: layer5.Extension().StringRef(), + Name: layer5.Name(), + IsVisible: true, + PropertyID: layer5.Property().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": nil, + "b": nil, + }, + }, + Infobox: nil, + Tags: nil, + Children: []*layerJSON{ + { + ID: layer51.ID().String(), + PluginID: layer51.Plugin().StringRef(), + ExtensionID: layer51.Extension().StringRef(), + IsVisible: true, + PropertyID: layer51.Property().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "a", + "b": "b", + }, + }, + }, + }, + } + + expectedLayer6 := &layerJSON{ + ID: layer6.ID().String(), + PluginID: layer6.Plugin().StringRef(), + ExtensionID: layer6.Extension().StringRef(), + Name: layer6.Name(), + IsVisible: true, + PropertyID: 
layer6.Property().String(), + Property: map[string]interface{}{ + "B": []map[string]interface{}{ + { + "id": propertyItemID1.String(), + "a": "XYZ", + }, + { + "id": propertyItemID2.String(), + "a": "ZYX", + }, + }, + }, + Infobox: nil, + Tags: nil, + Children: nil, + } + + expectedLayers := []*layerJSON{ + expectedLayer1, + expectedLayer2, + expectedLayer3, + expectedLayer4, + expectedLayer5, + expectedLayer6, + } + + expected := &sceneJSON{ + SchemaVersion: version, + ID: sceneID.String(), + PublishedAt: publishedAt, + Layers: expectedLayers, + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, + Plugins: map[string]map[string]interface{}{ + pluginID.String(): { + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, + }, + Widgets: []*widgetJSON{ + { + ID: sceneWidget2.ID().String(), + PluginID: sceneWidget2.Plugin().String(), + ExtensionID: sceneWidget2.Extension().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, + Extended: true, + }, + }, + WidgetAlignSystem: nil, + Tags: []*tagJSON{ + {ID: tag4.ID().String(), Label: tag4.Label(), Tags: []tagJSON{ + {ID: tag1.ID().String(), Label: tag1.Label(), Tags: nil}, + {ID: tag2.ID().String(), Label: tag2.Label(), Tags: nil}, + {ID: tag3.ID().String(), Label: tag3.Label(), Tags: nil}, + }}, + }, + Clusters: []*clusterJSON{}, + } + + // exec + sb := New(lloader, ploader, dloader, tloader, tsloader) + result, err := sb.buildScene(context.Background(), scene, publishedAt) + + assert.NoError(t, err) + assert.Equal(t, expected, result) +} diff --git a/server/pkg/scene/builder/encoder.go b/server/pkg/scene/builder/encoder.go new file mode 100644 index 000000000..1907e194c --- /dev/null +++ b/server/pkg/scene/builder/encoder.go @@ -0,0 +1,182 @@ +package builder + +import ( + "github.com/reearth/reearth-backend/pkg/layer/encoding" + "github.com/reearth/reearth-backend/pkg/layer/merging" + 
"github.com/reearth/reearth-backend/pkg/property" +) + +var _ encoding.Encoder = &encoder{} + +type encoder struct { + res *layerJSON +} + +func (*encoder) MimeType() string { + return "application/json" +} + +func (e *encoder) Result() []*layerJSON { + if e == nil || e.res == nil { + return nil + } + return e.res.Children +} + +func (e *encoder) Encode(l merging.SealedLayer) (err error) { + if e == nil { + return + } + e.res = e.layer(l) + return +} + +func (e *encoder) layer(layer merging.SealedLayer) *layerJSON { + if layer == nil { + return nil + } + l := layer.Common() + if l == nil { + return nil + } + + var children []*layerJSON + if g := layer.Group(); g != nil { + for _, c := range g.Children { + if d := e.layer(c); d != nil { + children = append(children, d) + } + } + } + + var propertyID string + if l.Property != nil { + propertyID = l.Property.Original.String() + } + + var tags []tagJSON + if len(l.Tags) > 0 { + for _, t := range l.Tags { + var tags2 []tagJSON + if len(t.Tags) > 0 { + tags2 = make([]tagJSON, 0, len(t.Tags)) + for _, t := range t.Tags { + tags2 = append(tags2, tagJSON{ + ID: t.ID.String(), + Label: t.Label, + }) + } + } + tags = append(tags, tagJSON{ + ID: t.ID.String(), + Label: t.Label, + Tags: tags2, + }) + } + } + + return &layerJSON{ + ID: l.Original.String(), + PluginID: l.PluginID.StringRef(), + ExtensionID: l.ExtensionID.StringRef(), + Name: l.Name, + Property: e.property(l.Property), + PropertyID: propertyID, + Infobox: e.infobox(l.Infobox), + IsVisible: l.IsVisible, + Tags: tags, + Children: children, + } +} + +func (e *encoder) infobox(i *merging.SealedInfobox) *infoboxJSON { + if i == nil { + return nil + } + fields := make([]infoboxFieldJSON, 0, len(i.Fields)) + for _, f := range i.Fields { + fields = append(fields, infoboxFieldJSON{ + ID: f.ID.String(), + PluginID: f.Plugin.String(), + ExtensionID: string(f.Extension), + Property: e.property(f.Property), + }) + } + return &infoboxJSON{ + Fields: fields, + Property: 
e.property(i.Property), + } +} + +func (e *encoder) property(p *property.Sealed) propertyJSON { + return p.Interface() +} + +type layerJSON struct { + ID string `json:"id"` + PluginID *string `json:"pluginId,omitempty"` + ExtensionID *string `json:"extensionId,omitempty"` + Name string `json:"name,omitempty"` + PropertyID string `json:"propertyId,omitempty"` + Property propertyJSON `json:"property,omitempty"` + Infobox *infoboxJSON `json:"infobox,omitempty"` + Tags []tagJSON `json:"tags,omitempty"` + IsVisible bool `json:"isVisible"` + Children []*layerJSON `json:"children,omitempty"` +} + +type tagJSON struct { + ID string `json:"id"` + Label string `json:"label"` + Tags []tagJSON `json:"tags,omitempty"` +} + +type infoboxJSON struct { + Fields []infoboxFieldJSON `json:"fields"` + Property propertyJSON `json:"property"` +} + +type infoboxFieldJSON struct { + ID string `json:"id"` + PluginID string `json:"pluginId"` + ExtensionID string `json:"extensionId"` + Property propertyJSON `json:"property"` +} + +type propertyJSON = map[string]interface{} + +type widgetAlignSystemJSON struct { + Inner *widgetZoneJSON `json:"inner"` + Outer *widgetZoneJSON `json:"outer"` +} + +type widgetZoneJSON struct { + Left *widgetSectionJSON `json:"left"` + Center *widgetSectionJSON `json:"center"` + Right *widgetSectionJSON `json:"right"` +} + +type widgetSectionJSON struct { + Top *widgetAreaJSON `json:"top"` + Middle *widgetAreaJSON `json:"middle"` + Bottom *widgetAreaJSON `json:"bottom"` +} + +type widgetAreaJSON struct { + WidgetIDs []string `json:"widgetIds"` + Align string `json:"align"` +} + +type widgetJSON struct { + ID string `json:"id"` + PluginID string `json:"pluginId"` + ExtensionID string `json:"extensionId"` + Property propertyJSON `json:"property"` + Extended bool `json:"extended"` +} + +type clusterJSON struct { + ID string `json:"id"` + Name string `json:"name"` + Property propertyJSON `json:"property"` +} diff --git a/server/pkg/scene/builder/encoder_test.go 
b/server/pkg/scene/builder/encoder_test.go new file mode 100644 index 000000000..682289769 --- /dev/null +++ b/server/pkg/scene/builder/encoder_test.go @@ -0,0 +1,187 @@ +package builder + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/stretchr/testify/assert" +) + +func TestEncoder_Result(t *testing.T) { + tests := []struct { + Name string + Target *encoder + Expected []*layerJSON + }{ + { + Name: "nil encoder", + Target: nil, + Expected: nil, + }, + { + Name: "success", + Target: &encoder{ + res: &layerJSON{ + Children: []*layerJSON{ + { + ID: "xxx", + PluginID: nil, + ExtensionID: nil, + Name: "aaa", + Property: nil, + Infobox: nil, + }, + }, + }, + }, + Expected: []*layerJSON{ + { + ID: "xxx", + PluginID: nil, + ExtensionID: nil, + Name: "aaa", + Property: nil, + Infobox: nil, + }, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.Target.Result() + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestEncoder_Encode(t *testing.T) { + tests := []struct { + Name string + Target *encoder + Input merging.SealedLayer + Expected error + }{ + { + Name: "nil encoder", + Target: nil, + Input: nil, + Expected: nil, + }, + { + Name: "success encoding", + Target: &encoder{}, + Input: nil, + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.Target.Encode(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestEncoder_Layers(t *testing.T) { + lid := layer.NewID() + sid := scene.NewID() + pid := property.NewID() + ex := layer.PluginExtensionID("marker") + iid := property.NewItemID() + v1 := property.LatLng{ + Lat: 4.4, + Lng: 53.4, + } + + f1 := property.SealedField{ + ID: property.FieldID("location"), + Val: 
property.NewValueAndDatasetValue( + property.ValueTypeLatLng, + nil, + property.ValueTypeLatLng.ValueFrom(v1), + ), + } + fl1 := []*property.SealedField{} + fl1 = append(fl1, &f1) + item1 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: property.SchemaGroupID("default"), + LinkedDataset: nil, + Fields: fl1, + Groups: nil, + } + il := []*property.SealedItem{} + il = append(il, &item1) + + sp := property.Sealed{ + Original: &pid, + Items: il, + } + sealed := &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Parent: nil, + Name: "test", + Scene: sid, + Property: nil, + Infobox: nil, + PluginID: &layer.OfficialPluginID, + ExtensionID: &ex, + }, + Property: &sp, + Infobox: nil, + }, + } + + tests := []struct { + Name string + Target *encoder + Input *merging.SealedLayerItem + Expected *layerJSON + }{ + { + Name: "nil layers", + Target: &encoder{}, + Input: nil, + Expected: nil, + }, + { + Name: "success", + Target: &encoder{}, + Input: sealed, + Expected: &layerJSON{ + ID: lid.String(), + PluginID: layer.OfficialPluginID.StringRef(), + ExtensionID: ex.StringRef(), + Name: "test", + PropertyID: pid.String(), + Property: map[string]interface{}{ + "default": map[string]interface{}{ + "location": property.LatLng{Lat: 4.4, Lng: 53.4}, + }, + }, + Infobox: nil, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.Target.layer(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} diff --git a/server/pkg/scene/builder/scene.go b/server/pkg/scene/builder/scene.go new file mode 100644 index 000000000..f74bc5a24 --- /dev/null +++ b/server/pkg/scene/builder/scene.go @@ -0,0 +1,206 @@ +package builder + +import ( + "context" + "time" + + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/tag" +) + +type sceneJSON struct { + 
SchemaVersion int `json:"schemaVersion"` + ID string `json:"id"` + PublishedAt time.Time `json:"publishedAt"` + Property propertyJSON `json:"property"` + Plugins map[string]propertyJSON `json:"plugins"` + Layers []*layerJSON `json:"layers"` + Widgets []*widgetJSON `json:"widgets"` + WidgetAlignSystem *widgetAlignSystemJSON `json:"widgetAlignSystem"` + Tags []*tagJSON `json:"tags"` + Clusters []*clusterJSON `json:"clusters"` +} + +func (b *Builder) scene(ctx context.Context, s *scene.Scene, publishedAt time.Time, l []*layerJSON, p []*property.Property) (*sceneJSON, error) { + tags, err := b.tags(ctx, s) + if err != nil { + return nil, err + } + + return &sceneJSON{ + SchemaVersion: version, + ID: s.ID().String(), + PublishedAt: publishedAt, + Property: b.property(ctx, findProperty(p, s.Property())), + Plugins: b.plugins(ctx, s, p), + Widgets: b.widgets(ctx, s, p), + Clusters: b.clusters(ctx, s, p), + Layers: l, + Tags: tags, + WidgetAlignSystem: buildWidgetAlignSystem(s.Widgets().Alignment()), + }, nil +} + +func (b *Builder) plugins(ctx context.Context, s *scene.Scene, p []*property.Property) map[string]propertyJSON { + scenePlugins := s.Plugins().Plugins() + res := map[string]propertyJSON{} + for _, sp := range scenePlugins { + if sp == nil { + continue + } + if pp := sp.Property(); pp != nil { + res[sp.Plugin().String()] = b.property(ctx, findProperty(p, *pp)) + } + } + return res +} + +func (b *Builder) widgets(ctx context.Context, s *scene.Scene, p []*property.Property) []*widgetJSON { + sceneWidgets := s.Widgets().Widgets() + res := make([]*widgetJSON, 0, len(sceneWidgets)) + for _, w := range sceneWidgets { + if !w.Enabled() { + continue + } + + res = append(res, &widgetJSON{ + ID: w.ID().String(), + PluginID: w.Plugin().String(), + ExtensionID: string(w.Extension()), + Property: b.property(ctx, findProperty(p, w.Property())), + Extended: w.Extended(), + }) + } + return res +} + +func (b *Builder) clusters(ctx context.Context, s *scene.Scene, p 
[]*property.Property) []*clusterJSON { + sceneClusters := s.Clusters().Clusters() + res := make([]*clusterJSON, 0, len(sceneClusters)) + for _, c := range sceneClusters { + res = append(res, &clusterJSON{ + ID: c.ID().String(), + Name: c.Name(), + Property: b.property(ctx, findProperty(p, c.Property())), + }) + } + return res +} + +func (b *Builder) tags(ctx context.Context, s *scene.Scene) ([]*tagJSON, error) { + tags, err := b.tloader(ctx, s.ID()) + if err != nil { + return nil, err + } + tagMap := tag.MapFromRefList(tags) + rootTags := tag.DerefList(tags).Roots() + stags := make([]*tagJSON, 0, len(rootTags)) + for _, t := range rootTags { + if t == nil { + continue + } + t2 := toTag(t, tagMap) + stags = append(stags, &t2) + } + return stags, nil +} + +func toTag(t tag.Tag, m tag.Map) tagJSON { + var tags []tagJSON + if children := tag.GroupFrom(t).Tags(); children != nil { + tags = make([]tagJSON, 0, len(children)) + for _, tid := range children { + t, ok := m[tid] + if !ok { + continue + } + t2 := toTag(t, m) + tags = append(tags, t2) + } + } + + return tagJSON{ + ID: t.ID().String(), + Label: t.Label(), + Tags: tags, + } +} + +func (b *Builder) property(ctx context.Context, p *property.Property) propertyJSON { + return property.SealProperty(ctx, p).Interface() +} + +func findProperty(pp []*property.Property, i property.ID) *property.Property { + for _, p := range pp { + if p.ID() == i { + return p + } + } + return nil +} + +func toString(wids []scene.WidgetID) []string { + if wids == nil { + return nil + } + docids := make([]string, 0, len(wids)) + for _, wid := range wids { + docids = append(docids, wid.String()) + } + return docids +} + +func buildWidgetAlignSystem(s *scene.WidgetAlignSystem) *widgetAlignSystemJSON { + if s == nil { + return nil + } + was := widgetAlignSystemJSON{ + Inner: buildWidgetZone(s.Zone(scene.WidgetZoneInner)), + Outer: buildWidgetZone(s.Zone(scene.WidgetZoneOuter)), + } + if was.Inner == nil && was.Outer == nil { + return nil + } + 
return &was +} + +func buildWidgetZone(z *scene.WidgetZone) *widgetZoneJSON { + if z == nil { + return nil + } + zj := widgetZoneJSON{ + Left: buildWidgetSection(z.Section(scene.WidgetSectionLeft)), + Center: buildWidgetSection(z.Section(scene.WidgetSectionCenter)), + Right: buildWidgetSection(z.Section(scene.WidgetSectionRight)), + } + if zj.Left == nil && zj.Center == nil && zj.Right == nil { + return nil + } + return &zj +} + +func buildWidgetSection(s *scene.WidgetSection) *widgetSectionJSON { + if s == nil { + return nil + } + sj := widgetSectionJSON{ + Middle: buildWidgetArea(s.Area(scene.WidgetAreaMiddle)), + Top: buildWidgetArea(s.Area(scene.WidgetAreaTop)), + Bottom: buildWidgetArea(s.Area(scene.WidgetAreaBottom)), + } + if sj.Top == nil && sj.Middle == nil && sj.Bottom == nil { + return nil + } + return &sj +} + +func buildWidgetArea(a *scene.WidgetArea) *widgetAreaJSON { + if a == nil || len(a.WidgetIDs()) == 0 { + return nil + } + return &widgetAreaJSON{ + WidgetIDs: toString(a.WidgetIDs()), + Align: string(a.Alignment()), + } +} diff --git a/server/pkg/scene/builder/scene_test.go b/server/pkg/scene/builder/scene_test.go new file mode 100644 index 000000000..7fd36fafd --- /dev/null +++ b/server/pkg/scene/builder/scene_test.go @@ -0,0 +1,130 @@ +package builder + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/stretchr/testify/assert" +) + +func TestScene_FindProperty(t *testing.T) { + p1 := property.NewID() + sid := scene.NewID() + scid := property.MustSchemaID("xx~1.0.0/aa") + pl := []*property.Property{ + property.New().NewID().Scene(sid).Schema(scid).MustBuild(), + property.New().ID(p1).Scene(sid).Schema(scid).MustBuild(), + } + + tests := []struct { + Name string + PL []*property.Property + Input property.ID + Expected *property.Property + }{ + { + Name: "Found", + PL: pl, + Input: p1, + Expected: property.New().Scene(sid).Schema(scid).ID(p1).MustBuild(), + }, 
+ { + Name: " NotFound", + PL: pl, + Input: property.NewID(), + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := findProperty(tc.PL, tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestScene_ToString(t *testing.T) { + wid := scene.NewWidgetID() + widS := wid.String() + wid2 := scene.NewWidgetID() + wid2S := wid2.String() + wid3 := scene.NewWidgetID() + wid3S := wid3.String() + wids := []scene.WidgetID{wid, wid2, wid3} + widsString := []string{widS, wid2S, wid3S} + + tests := []struct { + Name string + Input []scene.WidgetID + Expected []string + }{ + { + Name: "Convert a slice of scene.WidgetID to a slice of strings", + Input: wids, + Expected: widsString, + }, + { + Name: "Return nil when no WidgetIDs are inputted", + Input: nil, + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := toString(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestBuildWidgetAlignSystem(t *testing.T) { + wid := scene.NewWidgetID() + was := scene.NewWidgetAlignSystem() + was.Area(scene.WidgetLocation{ + Zone: scene.WidgetZoneInner, + Section: scene.WidgetSectionLeft, + Area: scene.WidgetAreaTop, + }).Add(wid, -1) + + tests := []struct { + Name string + Input *scene.WidgetAlignSystem + Expected *widgetAlignSystemJSON + }{ + { + Name: "works", + Input: was, + Expected: &widgetAlignSystemJSON{ + Inner: &widgetZoneJSON{ + Left: &widgetSectionJSON{ + Top: &widgetAreaJSON{ + WidgetIDs: []string{wid.String()}, + Align: "start", + }, + }, + }, + }, + }, + { + Name: "nil", + Input: nil, + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := buildWidgetAlignSystem(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} diff --git a/server/pkg/scene/builder_test.go b/server/pkg/scene/builder_test.go new file mode 100644 index 
000000000..9ee94b06a --- /dev/null +++ b/server/pkg/scene/builder_test.go @@ -0,0 +1,306 @@ +package scene + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestBuilder_IDs(t *testing.T) { + tid := NewTeamID() + lid := NewLayerID() + b := New().NewID().RootLayer(lid).Team(tid).MustBuild() + assert.NotNil(t, b.ID()) + assert.Equal(t, tid, b.Team()) + assert.Equal(t, lid, b.RootLayer()) + sid := NewID() + b2 := New().ID(sid).RootLayer(lid).Team(tid).MustBuild() + assert.Equal(t, sid, b2.ID()) +} + +func TestBuilder_UpdatedAt(t *testing.T) { + ti := time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC) + b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).UpdatedAt(ti).MustBuild() + assert.Equal(t, ti, b.UpdatedAt()) +} + +func TestBuilder_Property(t *testing.T) { + pid := NewPropertyID() + b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).Property(pid).MustBuild() + assert.Equal(t, pid, b.Property()) +} + +func TestBuilder_Plugins(t *testing.T) { + ps := NewPlugins([]*Plugin{ + NewPlugin(OfficialPluginID, NewPropertyID().Ref()), + }) + b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).Plugins(ps).MustBuild() + assert.Equal(t, ps, b.Plugins()) +} + +func TestBuilder_Project(t *testing.T) { + pid := NewProjectID() + b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).Project(pid).MustBuild() + assert.Equal(t, pid, b.Project()) +} + +func TestBuilder_Widgets(t *testing.T) { + ws := NewWidgets([]*Widget{ + MustWidget(NewWidgetID(), OfficialPluginID, "xxx", NewPropertyID(), true, false), + }, nil) + b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).Widgets(ws).MustBuild() + assert.Equal(t, ws, b.Widgets()) +} + +func TestBuilder_Build(t *testing.T) { + tid := NewTeamID() + sid := NewID() + pid := NewProjectID() + ppid := NewPropertyID() + lid := NewLayerID() + ws := NewWidgets([]*Widget{ + MustWidget(NewWidgetID(), OfficialPluginID, "xxx", ppid, true, false), + }, nil) + ps := 
NewPlugins([]*Plugin{ + NewPlugin(OfficialPluginID, ppid.Ref()), + }) + + type args struct { + ID ID + Project ProjectID + Team TeamID + RootLayer LayerID + Widgets *Widgets + Plugins *Plugins + UpdatedAt time.Time + Property PropertyID + } + + tests := []struct { + Name string + Args args + Expected *Scene + Err error + }{ + { + Name: "fail nil scene id", + Args: args{ + ID: ID{}, + Project: pid, + Team: tid, + RootLayer: lid, + Widgets: ws, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + }, + Err: ErrInvalidID, + }, + { + Name: "fail nil team id", + Args: args{ + ID: sid, + Project: pid, + Team: TeamID{}, + RootLayer: lid, + Widgets: ws, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + }, + Err: ErrInvalidID, + }, + { + Name: "fail nil root layer id", + Args: args{ + ID: sid, + Project: pid, + Team: tid, + RootLayer: LayerID{}, + Widgets: ws, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + }, + Err: ErrInvalidID, + }, + { + Name: "success build new scene", + Args: args{ + ID: sid, + Project: pid, + Team: tid, + RootLayer: lid, + Widgets: ws, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + }, + Expected: &Scene{ + id: sid, + project: pid, + team: tid, + rootLayer: lid, + widgets: ws, + plugins: ps, + updatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + property: ppid, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := New(). + ID(tt.Args.ID). + Widgets(tt.Args.Widgets). + Project(tt.Args.Project). + Plugins(tt.Args.Plugins). + Property(tt.Args.Property). + RootLayer(tt.Args.RootLayer). + Team(tt.Args.Team). + UpdatedAt(tt.Args.UpdatedAt). 
+ Build() + + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) + } else { + assert.Equal(t, tt.Err, err) + } + }) + } +} + +func TestBuilder_MustBuild(t *testing.T) { + tid := NewTeamID() + sid := NewID() + pid := NewProjectID() + ppid := NewPropertyID() + lid := NewLayerID() + ws := NewWidgets([]*Widget{ + MustWidget(NewWidgetID(), OfficialPluginID, "xxx", ppid, true, false), + }, nil) + was := NewWidgetAlignSystem() + ps := NewPlugins([]*Plugin{ + NewPlugin(OfficialPluginID, ppid.Ref()), + }) + + type args struct { + ID ID + Project ProjectID + Team TeamID + RootLayer LayerID + Widgets *Widgets + WidgetAlignSystem *WidgetAlignSystem + Plugins *Plugins + UpdatedAt time.Time + Property PropertyID + } + + tests := []struct { + Name string + Args args + Expected *Scene + Err error + }{ + { + Name: "fail nil scene id", + Args: args{ + ID: ID{}, + Project: pid, + Team: tid, + RootLayer: lid, + Widgets: ws, + WidgetAlignSystem: was, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + }, + Err: ErrInvalidID, + }, + { + Name: "fail nil team id", + Args: args{ + ID: sid, + Project: pid, + Team: TeamID{}, + RootLayer: lid, + Widgets: ws, + WidgetAlignSystem: was, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + }, + Err: ErrInvalidID, + }, + { + Name: "fail nil root layer id", + Args: args{ + ID: sid, + Project: pid, + Team: tid, + RootLayer: LayerID{}, + Widgets: ws, + WidgetAlignSystem: was, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + }, + Err: ErrInvalidID, + }, + { + Name: "success build new scene", + Args: args{ + ID: sid, + Project: pid, + Team: tid, + RootLayer: lid, + Widgets: ws, + WidgetAlignSystem: was, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + }, + Expected: &Scene{ + id: sid, + project: pid, + team: tid, + rootLayer: lid, + widgets: ws, + plugins: ps, + updatedAt: 
time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + property: ppid, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + + build := func() *Scene { + t.Helper() + return New(). + ID(tt.Args.ID). + Widgets(tt.Args.Widgets). + Project(tt.Args.Project). + Plugins(tt.Args.Plugins). + Property(tt.Args.Property). + RootLayer(tt.Args.RootLayer). + Team(tt.Args.Team). + UpdatedAt(tt.Args.UpdatedAt). + MustBuild() + } + + if tt.Err != nil { + assert.PanicsWithValue(t, tt.Err, func() { _ = build() }) + } else { + assert.Equal(t, tt.Expected, build()) + } + }) + } +} diff --git a/server/pkg/scene/cluster.go b/server/pkg/scene/cluster.go new file mode 100644 index 000000000..168884981 --- /dev/null +++ b/server/pkg/scene/cluster.go @@ -0,0 +1,53 @@ +package scene + +type Cluster struct { + id ClusterID + name string + property PropertyID +} + +func NewCluster(cid ClusterID, name string, pid PropertyID) (*Cluster, error) { + if cid.IsNil() { + return nil, ErrInvalidID + } + return &Cluster{ + id: cid, + name: name, + property: pid, + }, nil +} + +func (c *Cluster) ID() ClusterID { + if c == nil { + return ClusterID{} + } + return c.id +} + +func (c *Cluster) Name() string { + if c == nil { + return "" + } + return c.name +} + +func (c *Cluster) Property() PropertyID { + if c == nil { + return PropertyID{} + } + return c.property +} + +func (c *Cluster) Rename(name string) { + if c == nil { + return + } + c.name = name +} + +func (c *Cluster) UpdateProperty(pid PropertyID) { + if c == nil { + return + } + c.property = pid +} diff --git a/server/pkg/scene/cluster_list.go b/server/pkg/scene/cluster_list.go new file mode 100644 index 000000000..2addbc3af --- /dev/null +++ b/server/pkg/scene/cluster_list.go @@ -0,0 +1,76 @@ +package scene + +type ClusterList struct { + clusters []*Cluster +} + +func NewClusterList() *ClusterList { + return &ClusterList{} +} + +func NewClusterListFrom(clusters []*Cluster) *ClusterList { + return 
&ClusterList{clusters: append([]*Cluster{}, clusters...)} +} + +func (tl *ClusterList) Clusters() []*Cluster { + if tl == nil { + return nil + } + return append([]*Cluster{}, tl.clusters...) +} + +func (tl *ClusterList) Has(tid ClusterID) bool { + if tl == nil { + return false + } + for _, cluster := range tl.clusters { + if cluster.ID() == tid { + return true + } + } + return false +} + +func (tl *ClusterList) Add(clusters ...*Cluster) { + if tl == nil { + return + } + tl.clusters = append(tl.clusters, clusters...) +} + +func (tl *ClusterList) Get(cid ClusterID) *Cluster { + if tl == nil { + return nil + } + for _, c := range tl.clusters { + if c.ID() == cid { + return c + } + } + return nil +} + +func (tl *ClusterList) Remove(clusters ...ClusterID) { + if tl == nil { + return + } + for i := 0; i < len(tl.clusters); i++ { + for _, tid := range clusters { + if tl.clusters[i].id == tid { + tl.clusters = append(tl.clusters[:i], tl.clusters[i+1:]...) + i-- + } + } + } +} + +func (tl *ClusterList) Properties() []PropertyID { + if tl == nil { + return nil + } + res := make([]PropertyID, 0, len(tl.clusters)) + for _, c := range tl.clusters { + res = append(res, c.property) + } + return res +} diff --git a/server/pkg/scene/cluster_list_test.go b/server/pkg/scene/cluster_list_test.go new file mode 100644 index 000000000..548aca8f2 --- /dev/null +++ b/server/pkg/scene/cluster_list_test.go @@ -0,0 +1,224 @@ +package scene + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestList_Add(t *testing.T) { + c1, _ := NewCluster(NewClusterID(), "c1", NewPropertyID()) + c2, _ := NewCluster(NewClusterID(), "c2", NewPropertyID()) + type args struct { + clusters []*Cluster + } + + tests := []struct { + name string + list *ClusterList + args args + want *ClusterList + }{ + { + name: "should add a new cluster", + list: &ClusterList{clusters: []*Cluster{c1}}, + args: args{clusters: []*Cluster{c2}}, + want: NewClusterListFrom([]*Cluster{c1, c2}), + }, + { + name: 
"nil_list: should not add a new cluster", + list: nil, + args: args{clusters: []*Cluster{c1}}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + tc.list.Add(tc.args.clusters...) + assert.Equal(t, tc.want, tc.list) + }) + } +} + +func TestList_Clusters(t *testing.T) { + c1, _ := NewCluster(NewClusterID(), "ccc", NewPropertyID()) + c2, _ := NewCluster(NewClusterID(), "xxx", NewPropertyID()) + + tests := []struct { + name string + list *ClusterList + want []*Cluster + }{ + { + name: "should return clusters", + list: NewClusterListFrom([]*Cluster{c1, c2}), + want: []*Cluster{c1, c2}, + }, + { + name: "nil_list: should return nil", + list: nil, + want: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.want, tc.list.Clusters()) + }) + } +} + +func TestList_Has(t *testing.T) { + c1, _ := NewCluster(NewClusterID(), "xxx", NewPropertyID()) + + type args struct { + tid ClusterID + } + + tests := []struct { + name string + list *ClusterList + args args + want bool + }{ + { + name: "should return true", + list: NewClusterListFrom([]*Cluster{c1}), + args: args{ + tid: c1.ID(), + }, + want: true, + }, + { + name: "not existing: should return false", + list: NewClusterListFrom([]*Cluster{c1}), + args: args{ + tid: NewClusterID(), + }, + want: false, + }, + { + name: "nil_list: should return false", + args: args{ + tid: c1.ID(), + }, + want: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.want, tc.list.Has(tc.args.tid)) + }) + } +} + +func TestList_Remove(t *testing.T) { + c1, _ := NewCluster(NewClusterID(), "xxx", NewPropertyID()) + c2, _ := NewCluster(NewClusterID(), "xxx", NewPropertyID()) + c3, _ := NewCluster(NewClusterID(), "xxx", NewPropertyID()) + + type args struct { + cluster ClusterID + } + + tests := []struct { + name string + list *ClusterList + args 
args + want *ClusterList + }{ + { + name: "should remove a cluster", + list: NewClusterListFrom([]*Cluster{c1, c2, c3}), + args: args{ + cluster: c3.ID(), + }, + want: NewClusterListFrom([]*Cluster{c1, c2}), + }, + { + name: "not existing: should remove nothing", + list: NewClusterListFrom([]*Cluster{c1, c2}), + args: args{ + cluster: c3.ID(), + }, + want: NewClusterListFrom([]*Cluster{c1, c2}), + }, + { + name: "nil_list: return nothing", + args: args{ + cluster: c1.ID(), + }, + want: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.list.Remove(tc.args.cluster) + assert.Equal(t, tc.want, tc.list) + }) + } +} + +func TestClusterList_Get(t *testing.T) { + cid1 := NewClusterID() + cid2 := NewClusterID() + cid3 := NewClusterID() + c1, _ := NewCluster(cid1, "xxx", NewPropertyID()) + c2, _ := NewCluster(cid2, "zzz", NewPropertyID()) + c3, _ := NewCluster(cid3, "yyy", NewPropertyID()) + type args struct { + cid ClusterID + } + + tests := []struct { + name string + list *ClusterList + args args + want *Cluster + }{ + { + name: "should get a cluster", + list: NewClusterListFrom([]*Cluster{c1, c2, c3}), + args: args{ + cid: cid1, + }, + want: c1, + }, + { + name: "not existing: should get nil", + list: NewClusterListFrom([]*Cluster{c2, c3}), + args: args{ + cid: cid1, + }, + want: nil, + }, + { + name: "nil_list: should return nil", + list: nil, + args: args{ + cid: cid1, + }, + want: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + got := tc.list.Get(tc.args.cid) + assert.Equal(t, tc.want, got) + }) + } +} diff --git a/server/pkg/scene/cluster_test.go b/server/pkg/scene/cluster_test.go new file mode 100644 index 000000000..77127e2f8 --- /dev/null +++ b/server/pkg/scene/cluster_test.go @@ -0,0 +1,255 @@ +package scene + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestCluster_ID(t *testing.T) { + cid := NewClusterID() + 
clusterA := &Cluster{ + id: cid, + } + + tests := []struct { + name string + cluster *Cluster + want ClusterID + }{ + { + name: "should return cluster id", + cluster: clusterA, + want: cid, + }, + { + name: "should return empty if cluster is nil", + cluster: nil, + want: ClusterID{}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + got := tc.cluster.ID() + assert.Equal(t, tc.want, got) + }) + } +} + +func TestCluster_Name(t *testing.T) { + clusterA := &Cluster{ + name: "clusterA", + } + + tests := []struct { + name string + cluster *Cluster + want string + }{ + { + name: "should return cluster name", + cluster: clusterA, + want: "clusterA", + }, + { + name: "should return empty if cluster is nil", + cluster: nil, + want: "", + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + got := tc.cluster.Name() + assert.Equal(t, tc.want, got) + }) + } +} +func TestCluster_Property(t *testing.T) { + propertyId := NewPropertyID() + clusterA := &Cluster{ + property: propertyId, + } + + tests := []struct { + name string + cluster *Cluster + want PropertyID + }{ + { + name: "should return cluster property", + cluster: clusterA, + want: propertyId, + }, + { + name: "should return empty cluster property", + cluster: nil, + want: PropertyID{}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + got := tc.cluster.Property() + assert.Equal(t, tc.want, got) + }) + } +} + +func TestNew(t *testing.T) { + propertyId := NewPropertyID() + clusterId := NewClusterID() + type args struct { + cid ClusterID + name string + pid PropertyID + } + + tests := []struct { + name string + args args + want *Cluster + wantErr bool + }{ + { + name: "should create a new cluster", + args: args{ + cid: clusterId, + name: "ccc", + pid: propertyId, + }, + want: &Cluster{ + id: clusterId, + name: "ccc", + property: propertyId, + }, + wantErr: false, + 
}, + { + name: "should return invalid id error", + args: args{ + cid: ClusterID{}, + name: "xxx", + pid: propertyId, + }, + want: nil, + wantErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + got, err := NewCluster(tc.args.cid, tc.args.name, tc.args.pid) + assert.Equal(t, tc.wantErr, err != nil) + assert.Equal(t, tc.want, got) + }) + } +} + +func TestCluster_Rename(t *testing.T) { + propertyId := NewPropertyID() + clusterId := NewClusterID() + + type args struct { + name string + } + + tests := []struct { + name string + cluster *Cluster + args args + want *Cluster + }{ + { + name: "should change the name", + cluster: &Cluster{ + id: clusterId, + name: "ccc", + property: propertyId, + }, + args: args{ + name: "new name", + }, + want: &Cluster{ + id: clusterId, + name: "new name", + property: propertyId, + }, + }, + { + name: "shouldn't change the name", + args: args{ + name: "xxx", + }, + want: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.cluster.Rename(tc.args.name) + assert.Equal(t, tc.want, tc.cluster) + }) + } +} + +func TestCluster_UpdateProperty(t *testing.T) { + propertyId := NewPropertyID() + propertyId2 := NewPropertyID() + clusterId := NewClusterID() + + type args struct { + property PropertyID + } + + tests := []struct { + name string + cluster *Cluster + args args + want *Cluster + }{ + { + name: "should update the property", + cluster: &Cluster{ + id: clusterId, + name: "ccc", + property: propertyId, + }, + args: args{ + property: propertyId2, + }, + want: &Cluster{ + id: clusterId, + name: "ccc", + property: propertyId2, + }, + }, + { + name: "shouldn't update the property", + args: args{ + property: propertyId2, + }, + want: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.cluster.UpdateProperty(tc.args.property) + assert.Equal(t, tc.want, tc.cluster) + 
}) + } +} diff --git a/server/pkg/scene/id.go b/server/pkg/scene/id.go new file mode 100644 index 000000000..b14133c8f --- /dev/null +++ b/server/pkg/scene/id.go @@ -0,0 +1,57 @@ +package scene + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +type ID = id.SceneID +type WidgetID = id.WidgetID +type ClusterID = id.ClusterID +type LayerID = id.LayerID +type PropertyID = id.PropertyID +type PluginID = id.PluginID +type PluginExtensionID = id.PluginExtensionID +type ProjectID = id.ProjectID +type TeamID = id.TeamID + +type IDList = id.SceneIDList +type WidgetIDList = id.WidgetIDList + +var NewID = id.NewSceneID +var NewWidgetID = id.NewWidgetID +var NewClusterID = id.NewClusterID +var NewLayerID = id.NewLayerID +var NewPropertyID = id.NewPropertyID +var NewPluginID = id.NewPluginID +var NewProjectID = id.NewProjectID +var NewTeamID = id.NewTeamID + +var MustID = id.MustSceneID +var MustWidgetID = id.MustWidgetID +var MustClusterID = id.MustClusterID +var MustLayerID = id.MustLayerID +var MustPropertyID = id.MustPropertyID +var MustPluginID = id.MustPluginID +var MustProjectID = id.MustProjectID +var MustTeamID = id.MustTeamID + +var IDFrom = id.SceneIDFrom +var WidgetIDFrom = id.WidgetIDFrom +var ClusterIDFrom = id.ClusterIDFrom +var LayerIDFrom = id.LayerIDFrom +var PropertyIDFrom = id.PropertyIDFrom +var PluginIDFrom = id.PluginIDFrom +var ProjectIDFrom = id.ProjectIDFrom +var TeamIDFrom = id.TeamIDFrom + +var IDFromRef = id.SceneIDFromRef +var WidgetIDFromRef = id.WidgetIDFromRef +var ClusterIDFromRef = id.ClusterIDFromRef +var LayerIDFromRef = id.LayerIDFromRef +var PropertyIDFromRef = id.PropertyIDFromRef +var PluginIDFromRef = id.PluginIDFromRef +var ProjectIDFromRef = id.ProjectIDFromRef +var TeamIDFromRef = id.TeamIDFromRef + +var OfficialPluginID = id.OfficialPluginID +var ErrInvalidID = id.ErrInvalidID diff --git a/server/pkg/scene/list.go b/server/pkg/scene/list.go new file mode 100644 index 000000000..2a05b1897 --- /dev/null +++ 
b/server/pkg/scene/list.go @@ -0,0 +1,51 @@ +package scene + +type List []*Scene + +func (l List) IDs() []ID { + if l == nil { + return nil + } + + res := make([]ID, 0, len(l)) + for _, s := range l { + res = append(res, s.ID()) + } + return res +} + +func (l List) FilterByID(ids ...ID) List { + if l == nil { + return nil + } + + res := make(List, 0, len(l)) + for _, s := range l { + sid2 := s.ID() + for _, sid := range ids { + if sid == sid2 { + res = append(res, s) + break + } + } + } + return res +} + +func (l List) FilterByTeam(teams ...TeamID) List { + if l == nil { + return nil + } + + res := make(List, 0, len(l)) + for _, s := range l { + st := s.Team() + for _, t := range teams { + if t == st { + res = append(res, s) + break + } + } + } + return res +} diff --git a/server/pkg/scene/list_test.go b/server/pkg/scene/list_test.go new file mode 100644 index 000000000..c1e53d2e4 --- /dev/null +++ b/server/pkg/scene/list_test.go @@ -0,0 +1,44 @@ +package scene + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestList_FilterByID(t *testing.T) { + sid1 := NewID() + sid2 := NewID() + t1 := &Scene{id: sid1} + t2 := &Scene{id: sid2} + + assert.Equal(t, List{t1}, List{t1, t2}.FilterByID(sid1)) + assert.Equal(t, List{t2}, List{t1, t2}.FilterByID(sid2)) + assert.Equal(t, List{t1, t2}, List{t1, t2}.FilterByID(sid1, sid2)) + assert.Equal(t, List{}, List{t1, t2}.FilterByID(NewID())) + assert.Equal(t, List(nil), List(nil).FilterByID(sid1)) +} + +func TestList_FilterByTeam(t *testing.T) { + tid1 := NewTeamID() + tid2 := NewTeamID() + t1 := &Scene{id: NewID(), team: tid1} + t2 := &Scene{id: NewID(), team: tid2} + + assert.Equal(t, List{t1}, List{t1, t2}.FilterByTeam(tid1)) + assert.Equal(t, List{t2}, List{t1, t2}.FilterByTeam(tid2)) + assert.Equal(t, List{t1, t2}, List{t1, t2}.FilterByTeam(tid1, tid2)) + assert.Equal(t, List{}, List{t1, t2}.FilterByTeam(NewTeamID())) + assert.Equal(t, List(nil), List(nil).FilterByTeam(tid1)) +} + +func 
TestTeamList_IDs(t *testing.T) { + sid1 := NewID() + sid2 := NewID() + t1 := &Scene{id: sid1} + t2 := &Scene{id: sid2} + + assert.Equal(t, []ID{sid1, sid2}, List{t1, t2}.IDs()) + assert.Equal(t, []ID{}, List{}.IDs()) + assert.Equal(t, []ID(nil), List(nil).IDs()) +} diff --git a/server/pkg/scene/lock.go b/server/pkg/scene/lock.go new file mode 100644 index 000000000..b99d18e31 --- /dev/null +++ b/server/pkg/scene/lock.go @@ -0,0 +1,43 @@ +package scene + +// LockMode ใฏใ‚ทใƒผใƒณใฎใƒญใƒƒใ‚ฏ็Šถๆ…‹ใ‚’่กจใ—ใพใ™ใ€‚ +type LockMode string + +const ( + // LockModeFree ใฏใƒญใƒƒใ‚ฏใŒใ‹ใ‹ใฃใฆใ„ใชใ„็Šถๆ…‹ใงใ™ใ€‚ + LockModeFree LockMode = "" + // LockModePending ใฏๅ‡ฆ็†ๅพ…ใกไธญใงใ™ใ€‚ใƒ‡ใƒผใ‚ฟใฎๅค‰ๆ›ดใฏ็„กๅˆถ้™ใซๅค‰ๆ›ดใฏใงใใพใ™ใ€‚ + LockModePending LockMode = "pending" + // LockModePluginUpgrading ใฏใƒ—ใƒฉใ‚ฐใ‚คใƒณใ‚’ใ‚ขใƒƒใƒ—ใ‚ฐใƒฌใƒผใƒ‰ไธญใงใ™ใ€‚ใ‚ทใƒผใƒณใธใฎๅ„็จฎๆ“ไฝœใŒใงใใพใ›ใ‚“ใ€‚ + LockModePluginUpgrading LockMode = "plugin upgrading" + // LockModeDatasetSyncing ใฏใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚’ๅŒๆœŸไธญใงใ™ใ€‚ใ‚ทใƒผใƒณใธใฎๅ„็จฎๆ“ไฝœใŒใงใใพใ›ใ‚“ใ€‚ + LockModeDatasetSyncing LockMode = "dataset syncing" + // LockModePublishing ใฏใ‚ทใƒผใƒณใ‚’ๆ›ธใๅ‡บใ—ไธญใงใ™ใ€‚ใ‚ทใƒผใƒณใธใฎๅ„็จฎๆ“ไฝœใŒใงใใพใ›ใ‚“ใ€‚ + LockModePublishing LockMode = "publishing" +) + +func (l LockMode) IsLocked() bool { + switch l { + case LockModeFree: + return false + case LockModePending: + return false + } + return true +} + +func (l LockMode) Validate() (LockMode, bool) { + switch l { + case LockModeFree: + fallthrough + case LockModePending: + fallthrough + case LockModePluginUpgrading: + fallthrough + case LockModeDatasetSyncing: + fallthrough + case LockModePublishing: + return l, true + } + return l, false +} diff --git a/server/pkg/scene/lock_test.go b/server/pkg/scene/lock_test.go new file mode 100644 index 000000000..2410090e3 --- /dev/null +++ b/server/pkg/scene/lock_test.go @@ -0,0 +1,88 @@ +package scene + +import ( + "testing" + + 
"github.com/stretchr/testify/assert" +) + +func TestLockMode_IsLocked(t *testing.T) { + tests := []struct { + Name string + LM LockMode + Expected bool + }{ + { + Name: "unlocked free", + LM: LockModeFree, + Expected: false, + }, + { + Name: "unlocked pending", + LM: LockModePending, + Expected: false, + }, + { + Name: "locked", + LM: LockModePublishing, + Expected: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.LM.IsLocked() + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestLockMode_Validate(t *testing.T) { + tests := []struct { + Name string + LM LockMode + Expected bool + }{ + { + Name: "valid free", + LM: LockModeFree, + Expected: true, + }, + { + Name: "valid pending", + LM: LockModePending, + Expected: true, + }, + { + Name: "valid publishing", + LM: LockModePublishing, + Expected: true, + }, + { + Name: "valid upgrading", + LM: LockModePluginUpgrading, + Expected: true, + }, + { + Name: "valid syncing", + LM: LockModeDatasetSyncing, + Expected: true, + }, + { + Name: "invalid", + LM: "xxx", + Expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + _, res := tc.LM.Validate() + assert.Equal(t, tc.Expected, res) + }) + } +} diff --git a/server/pkg/scene/plugin.go b/server/pkg/scene/plugin.go new file mode 100644 index 000000000..3a5900bb1 --- /dev/null +++ b/server/pkg/scene/plugin.go @@ -0,0 +1,44 @@ +package scene + +type Plugin struct { + plugin PluginID + property *PropertyID +} + +func NewPlugin(plugin PluginID, property *PropertyID) *Plugin { + return &Plugin{ + plugin: plugin, + property: property.CopyRef(), + } +} + +func (s *Plugin) Plugin() PluginID { + if s == nil { + return PluginID{} + } + return s.plugin +} + +func (s *Plugin) PluginRef() *PluginID { + if s == nil { + return nil + } + return s.plugin.Ref() +} + +func (s *Plugin) Property() *PropertyID { + if s == nil { + return nil + } + return 
s.property.CopyRef() +} + +func (s *Plugin) Clone() *Plugin { + if s == nil { + return nil + } + return &Plugin{ + plugin: s.plugin.Clone(), + property: s.property.CopyRef(), + } +} diff --git a/server/pkg/scene/plugin_test.go b/server/pkg/scene/plugin_test.go new file mode 100644 index 000000000..9df0fb896 --- /dev/null +++ b/server/pkg/scene/plugin_test.go @@ -0,0 +1,27 @@ +package scene + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestPlugin(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + res := NewPlugin(pid, pr) + assert.Equal(t, &Plugin{ + plugin: pid, + property: pr, + }, res) + assert.Equal(t, pid, res.Plugin()) + assert.Equal(t, &pid, res.PluginRef()) + assert.Equal(t, pr, res.Property()) + + cl := res.Clone() + assert.Equal(t, res, cl) + assert.NotSame(t, res, cl) + + assert.Nil(t, (*Plugin)(nil).PluginRef()) +} diff --git a/server/pkg/scene/plugins.go b/server/pkg/scene/plugins.go new file mode 100644 index 000000000..de525e534 --- /dev/null +++ b/server/pkg/scene/plugins.go @@ -0,0 +1,141 @@ +package scene + +type Plugins struct { + plugins []*Plugin +} + +func NewPlugins(plugins []*Plugin) *Plugins { + if plugins == nil { + return &Plugins{plugins: []*Plugin{}} + } + p2 := make([]*Plugin, 0, len(plugins)) + for _, p1 := range plugins { + if p1 == nil { + continue + } + duplicated := false + for _, p3 := range p2 { + if p1.plugin.Equal(p3.plugin) { + duplicated = true + break + } + } + if !duplicated { + p2 = append(p2, p1) + } + } + return &Plugins{plugins: p2} +} + +func (p *Plugins) Plugins() []*Plugin { + return append([]*Plugin{}, p.plugins...) 
+} + +func (p *Plugins) Property(id PluginID) *PropertyID { + for _, p := range p.plugins { + if p.plugin.Equal(id) { + return p.property.CopyRef() + } + } + return nil +} + +func (p *Plugins) Has(id PluginID) bool { + for _, p2 := range p.plugins { + if p2.plugin.Equal(id) { + return true + } + } + return false +} + +func (p *Plugins) HasPlugin(id PluginID) bool { + for _, p2 := range p.plugins { + if p2.plugin.Equal(id) { + return true + } + } + return false +} + +func (p *Plugins) HasPluginByName(name string) bool { + for _, p2 := range p.plugins { + if p2.plugin.Name() == name { + return true + } + } + return false +} + +func (p *Plugins) Add(sp *Plugin) { + if sp == nil || p.HasPluginByName(sp.plugin.Name()) || sp.plugin.Equal(OfficialPluginID) { + return + } + p.plugins = append(p.plugins, sp) +} + +func (p *Plugins) Remove(pid PluginID) { + if pid.Equal(OfficialPluginID) { + return + } + for i, p2 := range p.plugins { + if p2.plugin.Equal(pid) { + p.plugins = append(p.plugins[:i], p.plugins[i+1:]...) 
+ return + } + } +} + +func (p *Plugins) Upgrade(from, to PluginID, pr *PropertyID, deleteProperty bool) { + if p == nil || from.IsNil() || to.IsNil() { + return + } + + for i, p2 := range p.plugins { + if p2.plugin.Equal(OfficialPluginID) { + continue + } + if p2.plugin.Equal(from) { + var newpr *PropertyID + if !deleteProperty { + newpr = pr.CopyRef() + if newpr == nil { + newpr = p2.property.CopyRef() + } + } + p.plugins[i] = &Plugin{plugin: to, property: newpr} + return + } + } +} + +func (p *Plugins) Properties() []PropertyID { + if p == nil { + return nil + } + res := make([]PropertyID, 0, len(p.plugins)) + for _, pp := range p.plugins { + if pp.property != nil { + res = append(res, *pp.property) + } + } + return res +} + +func (p *Plugins) Plugin(pluginID PluginID) *Plugin { + for _, pp := range p.plugins { + if pp.plugin.Equal(pluginID) { + return pp + } + } + return nil +} + +func (p *Plugins) PluginByName(name string) *Plugin { + for _, pp := range p.plugins { + if pp.plugin.Name() == name { + return pp + } + } + return nil +} diff --git a/server/pkg/scene/plugins_test.go b/server/pkg/scene/plugins_test.go new file mode 100644 index 000000000..a2eb40507 --- /dev/null +++ b/server/pkg/scene/plugins_test.go @@ -0,0 +1,467 @@ +package scene + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewPlugins(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input []*Plugin + Expected *Plugins + }{ + { + Name: "nil plugin list", + Input: nil, + Expected: &Plugins{plugins: []*Plugin{}}, + }, + { + Name: "plugin list with nil", + Input: []*Plugin{nil}, + Expected: &Plugins{plugins: []*Plugin{}}, + }, + { + Name: "plugin list with matched values", + Input: []*Plugin{ + { + plugin: pid, + property: pr, + }, + }, + Expected: &Plugins{plugins: []*Plugin{ + NewPlugin(pid, pr), + }}, + }, + { + Name: "plugin list with duplicated values", + Input: []*Plugin{ + { + plugin: pid, 
+ property: pr, + }, + { + plugin: pid, + property: pr, + }, + }, + Expected: &Plugins{plugins: []*Plugin{ + NewPlugin(pid, pr), + }}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := NewPlugins(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestPlugins_Property(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input PluginID + PS *Plugins + Expected *PropertyID + }{ + { + Name: "property is found", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: pr, + }, + { + Name: "property is nil", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(pid, nil)}), + Expected: nil, + }, + { + Name: "property is not found", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.PS.Property(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestPlugins_Plugin(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input PluginID + PS *Plugins + Expected *Plugin + }{ + { + Name: "plugin is found", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: NewPlugin(pid, pr), + }, + { + Name: "plugin is not found", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.PS.Plugin(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestPlugins_PluginByName(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input string + PS *Plugins + Expected *Plugin + }{ + { + Name: "plugin is found", + Input: "xxx", + PS: 
NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: NewPlugin(pid, pr), + }, + { + Name: "plugin is not found", + Input: "xxz", + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.PS.PluginByName(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestPlugins_Properties(t *testing.T) { + pr := NewPropertyID().Ref() + pr2 := NewPropertyID().Ref() + + tests := []struct { + Name string + PS *Plugins + Expected []PropertyID + }{ + { + Name: "plugins is nil", + PS: nil, + Expected: nil, + }, + { + Name: "get properties", + PS: NewPlugins([]*Plugin{ + NewPlugin(MustPluginID("zzz~1.1.1"), pr), + NewPlugin(MustPluginID("xxx~1.1.1"), pr2), + }), + Expected: []PropertyID{*pr, *pr2}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.PS.Properties() + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestPlugins_Has(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input PluginID + PS *Plugins + Expected bool + }{ + { + Name: "property is found", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: true, + }, + { + Name: "property is not found", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), + Expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.PS.Has(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestPlugins_HasPlugin(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input PluginID + PS *Plugins + Expected bool + }{ + { + Name: "plugin is found", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: true, + }, + { + Name: "plugin is not found", + 
Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(MustPluginID("xxx~1.2.1"), pr)}), + Expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.PS.HasPlugin(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestPlugins_HasPluginByName(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input string + PS *Plugins + Expected bool + }{ + { + Name: "plugin is found", + Input: "xxx", + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: true, + }, + { + Name: "plugin is not found", + Input: "xxxx", + PS: NewPlugins([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), + Expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.PS.HasPluginByName(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestPlugins_Add(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input *Plugin + PS, Expected *Plugins + }{ + { + Name: "add nil plugin", + Input: nil, + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + }, + { + Name: "add existing plugin", + Input: NewPlugin(pid, pr), + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + }, + { + Name: "add official plugin", + Input: NewPlugin(OfficialPluginID, pr), + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + }, + { + Name: "add new plugin", + Input: NewPlugin(pid, pr), + PS: NewPlugins([]*Plugin{}), + Expected: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + tc.PS.Add(tc.Input) + assert.Equal(t, tc.Expected, tc.PS) + }) + } +} + +func TestPlugins_Remove(t 
*testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input PluginID + PS, Expected *Plugins + }{ + { + Name: "remove official plugin", + Input: OfficialPluginID, + PS: NewPlugins([]*Plugin{NewPlugin(OfficialPluginID, pr)}), + Expected: NewPlugins([]*Plugin{NewPlugin(OfficialPluginID, pr)}), + }, + { + Name: "remove a plugin", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: NewPlugins([]*Plugin{}), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + tc.PS.Remove(tc.Input) + assert.Equal(t, tc.Expected, tc.PS) + }) + } +} + +func TestPlugins_Upgrade(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + nid := MustPluginID("zzz~1.1.1") + pr := NewPropertyID().Ref() + pr2 := NewPropertyID().Ref() + + type args struct { + From PluginID + To PluginID + Property *PropertyID + DeleteProperty bool + } + + tests := []struct { + name string + args args + target *Plugins + want *Plugins + }{ + { + name: "upgrade a plugin", + args: args{ + From: pid, + To: nid, + }, + target: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + want: NewPlugins([]*Plugin{NewPlugin(nid, pr)}), + }, + { + name: "upgrade a plugin with changing property", + args: args{ + From: pid, + To: nid, + Property: pr2, + }, + target: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + want: NewPlugins([]*Plugin{NewPlugin(nid, pr2)}), + }, + { + name: "upgrade a plugin with deleting property", + args: args{ + From: pid, + To: nid, + Property: pr2, + DeleteProperty: true, + }, + target: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + want: NewPlugins([]*Plugin{NewPlugin(nid, nil)}), + }, + { + name: "upgrade official plugin", + args: args{ + From: OfficialPluginID, + To: nid, + }, + target: NewPlugins([]*Plugin{NewPlugin(OfficialPluginID, pr)}), + want: NewPlugins([]*Plugin{NewPlugin(OfficialPluginID, pr)}), + }, + { + name: "nil", + args: args{ + From: pid, + To: nid, + }, + target: 
nil, + want: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.target.Upgrade(tc.args.From, tc.args.To, tc.args.Property, tc.args.DeleteProperty) + assert.Equal(t, tc.want, tc.target) + }) + } +} diff --git a/server/pkg/scene/scene.go b/server/pkg/scene/scene.go new file mode 100644 index 000000000..3957edbb9 --- /dev/null +++ b/server/pkg/scene/scene.go @@ -0,0 +1,120 @@ +package scene + +import ( + "errors" + "time" +) + +var ErrSceneIsLocked error = errors.New("scene is locked") + +type Scene struct { + id ID + project ProjectID + team TeamID + rootLayer LayerID + widgets *Widgets + plugins *Plugins + updatedAt time.Time + property PropertyID + clusters *ClusterList +} + +func (s *Scene) ID() ID { + if s == nil { + return ID{} + } + return s.id +} + +func (s *Scene) CreatedAt() time.Time { + if s == nil { + return time.Time{} + } + return s.id.Timestamp() +} + +func (s *Scene) Project() ProjectID { + if s == nil { + return ProjectID{} + } + return s.project +} + +func (s *Scene) Team() TeamID { + if s == nil { + return TeamID{} + } + return s.team +} + +func (s *Scene) Property() PropertyID { + if s == nil { + return PropertyID{} + } + return s.property +} + +func (s *Scene) RootLayer() LayerID { + if s == nil { + return LayerID{} + } + return s.rootLayer +} + +func (s *Scene) Widgets() *Widgets { + if s == nil { + return nil + } + return s.widgets +} + +func (s *Scene) Plugins() *Plugins { + if s == nil { + return nil + } + return s.plugins +} + +func (s *Scene) UpdatedAt() time.Time { + if s == nil { + return time.Time{} + } + return s.updatedAt +} + +func (s *Scene) SetUpdatedAt(updatedAt time.Time) { + if s == nil { + return + } + s.updatedAt = updatedAt +} + +func (s *Scene) IsTeamIncluded(teams []TeamID) bool { + if s == nil || teams == nil { + return false + } + for _, t := range teams { + if t == s.team { + return true + } + } + return false +} + +func (s *Scene) Properties() []PropertyID { 
+ if s == nil { + return nil + } + ids := []PropertyID{s.property} + ids = append(ids, s.plugins.Properties()...) + ids = append(ids, s.widgets.Properties()...) + ids = append(ids, s.clusters.Properties()...) + return ids +} + +func (s *Scene) Clusters() *ClusterList { + if s == nil { + return nil + } + return s.clusters +} diff --git a/server/pkg/scene/scene_test.go b/server/pkg/scene/scene_test.go new file mode 100644 index 000000000..83e6c771f --- /dev/null +++ b/server/pkg/scene/scene_test.go @@ -0,0 +1,120 @@ +package scene + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestScene_IsTeamIncluded(t *testing.T) { + tid := NewTeamID() + + tests := []struct { + Name string + Teams []TeamID + S *Scene + Expected bool + }{ + { + Name: "nil scene", + Teams: []TeamID{NewTeamID()}, + S: nil, + Expected: false, + }, + { + Name: "nil teams", + Teams: nil, + S: New().NewID().Team(NewTeamID()).RootLayer(NewLayerID()).MustBuild(), + Expected: false, + }, + { + Name: "teams exist", + Teams: []TeamID{tid}, + S: New().NewID().Team(tid).RootLayer(NewLayerID()).MustBuild(), + Expected: true, + }, + { + Name: "teams not exist", + Teams: []TeamID{tid}, + S: New().NewID().Team(NewTeamID()).RootLayer(NewLayerID()).MustBuild(), + Expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.S.IsTeamIncluded(tc.Teams) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestScene_SetUpdatedAt(t *testing.T) { + s := New().NewID().Team(NewTeamID()).RootLayer(NewLayerID()).UpdatedAt(time.Date(1999, 1, 1, 00, 00, 1, 1, time.UTC)).MustBuild() + s.SetUpdatedAt(time.Date(2021, 1, 1, 00, 00, 1, 1, time.UTC)) + assert.Equal(t, time.Date(2021, 1, 1, 00, 00, 1, 1, time.UTC), s.UpdatedAt()) + s = nil + s.SetUpdatedAt(time.Date(2020, 1, 1, 00, 00, 1, 1, time.UTC)) + assert.NotEqual(t, time.Date(2020, 1, 1, 00, 00, 1, 1, time.UTC), s.UpdatedAt()) +} + +func TestScene_Properties(t 
*testing.T) { + pid1 := NewPropertyID() + pid2 := NewPropertyID() + s := New(). + NewID(). + Team(NewTeamID()). + RootLayer(NewLayerID()). + Property(pid1). + Widgets( + NewWidgets([]*Widget{ + MustWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", pid2, true, false), + }, nil), + ). + MustBuild() + + assert.Equal(t, []PropertyID{pid1, pid2}, s.Properties()) +} + +func TestSceneNil(t *testing.T) { + var s *Scene + assert.Nil(t, s.Properties()) + assert.True(t, s.ID().IsEmpty()) + assert.Nil(t, s.Widgets()) + assert.True(t, s.Project().IsEmpty()) + assert.True(t, s.Team().IsEmpty()) + assert.True(t, s.RootLayer().IsEmpty()) + assert.True(t, s.CreatedAt().IsZero()) + assert.Nil(t, s.Plugins()) + assert.True(t, s.Property().IsEmpty()) +} + +func TestScene_Clusters(t *testing.T) { + c1, _ := NewCluster(NewClusterID(), "xxx", NewPropertyID()) + + tests := []struct { + name string + scene *Scene + want *ClusterList + }{ + { + name: "should return a cluster list", + scene: &Scene{ + clusters: NewClusterListFrom([]*Cluster{c1}), + }, + want: NewClusterListFrom([]*Cluster{c1}), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + assert.Equal(t, tc.want, tc.scene.Clusters()) + }) + } +} diff --git a/server/pkg/scene/sceneops/dataset_migrator.go b/server/pkg/scene/sceneops/dataset_migrator.go new file mode 100644 index 000000000..5dde9729a --- /dev/null +++ b/server/pkg/scene/sceneops/dataset_migrator.go @@ -0,0 +1,273 @@ +package sceneops + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/layerops" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" +) + +// TODO: define new loader types and use them instead of repos +type DatasetMigrator struct { + PropertyRepo repo.Property + 
LayerRepo repo.Layer + DatasetSchemaRepo repo.DatasetSchema + DatasetRepo repo.Dataset + Plugin plugin.Loader +} + +type MigrateDatasetResult struct { + Layers layer.Map + Properties property.Map + RemovedLayers *layer.IDSet + RemovedDatasetSchemas []dataset.SchemaID + RemovedDatasets []dataset.ID +} + +func (r MigrateDatasetResult) Merge(r2 MigrateDatasetResult) MigrateDatasetResult { + return MigrateDatasetResult{ + Layers: r.Layers.Merge(r2.Layers), + Properties: r.Properties.Merge(r2.Properties), + RemovedLayers: r.RemovedLayers.Concat(r2.RemovedLayers), + } +} + +// NOTE: DatasetSchemaใฎๅ‰Š้™คใซใฏๅฏพๅฟœใ—ใฆใ„ใชใ„๏ผˆ่‡ชๅ‹•็š„ใซๅ‰Š้™คใ•ใ‚Œใชใ„๏ผ‰ +func (srv DatasetMigrator) Migrate(ctx context.Context, sid dataset.SceneID, newdsl []*dataset.Schema, newdl dataset.List) (MigrateDatasetResult, error) { + result := MigrateDatasetResult{} + + // ๅ‰Š้™คๅฏพ่ฑก + noLogerUsedDS := []dataset.SchemaID{} + noLogerUsedD := []dataset.ID{} + + // ๅคใ„DatasetSchema + oldDatasetSchemaMap := map[dataset.SchemaID]*dataset.Schema{} + // ๆ–ฐใ—ใ„DatasetSchema + newDatasetSchemaMap := map[dataset.SchemaID]*dataset.Schema{} + // ๆ–ฐใ—ใ„DatasetSchemaใ‹ใ‚‰ๅคใ„DatasetSchemaIDใธใฎๅฏพๅฟœ + datasetSchemaMapNewOld := map[dataset.SchemaID]dataset.SchemaID{} + // ๅคใ„DatasetSchemaใ‹ใ‚‰ๆ–ฐใ—ใ„DatasetSchemaIDใธใฎๅฏพๅฟœ + datasetSchemaMapOldNew := map[dataset.SchemaID]dataset.SchemaID{} + // ๅคใ„DatasetFieldIDใ‹ใ‚‰ๆ–ฐใ—ใ„DatasetSchemaFieldIDใธใฎๅฏพๅฟœ + datasetSchemaFieldIDMap := map[dataset.FieldID]dataset.FieldID{} + // ๅคใ„Datasetใ‹ใ‚‰ๆ–ฐใ—ใ„Datasetใธใฎๅฏพๅฟœ + newDatasetMap := map[dataset.ID]*dataset.Dataset{} + datasetMapOldNew := map[dataset.ID]*dataset.Dataset{} + datasetIDMapOldNew := map[dataset.ID]dataset.ID{} + // ๆ–ฐใ—ใ„DatasetSchemaใ‹ใ‚‰DatasetDiffใธใฎๅฏพๅฟœ + datasetDiffMap := map[dataset.SchemaID]dataset.Diff{} + + // ใƒžใƒƒใƒ—ใฎไฝœๆˆ + for _, newds := range newdsl { + newDatasetSchemaMap[newds.ID()] = newds + + // 
ใ‚ฝใƒผใ‚นๅ…ƒใŒๅŒใ˜ๅคใ„DSใ‚’ๅ–ๅพ— + olddsl, err := srv.DatasetSchemaRepo.FindBySceneAndSource(ctx, sid, newds.Source()) + if err != nil { + return MigrateDatasetResult{}, err + } + + // ๅคใ„ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚’ๆŽขใ™๏ผˆๆ–ฐใ—ใ่ฟฝๅŠ ใ•ใ‚ŒใŸใ‚‚ใฎใ‚‚ๅ…ฅใ‚Š่พผใ‚“ใงใ„ใ‚‹ใฎใง๏ผ‰ + var oldds *dataset.Schema + for _, o := range olddsl { + if o.ID() != newds.ID() { + oldds = o + } + } + if oldds == nil { + // ใชใ„ใชใ‚‰ใƒชใƒณใ‚ฏใ•ใ‚Œใฆใ„ใ‚‹ใƒฌใ‚คใƒคใƒผใ‚„ใƒ—ใƒญใƒ‘ใƒ†ใ‚ฃใ‚‚ไฝœๆˆใ•ใ‚Œใฆใ„ใชใ„ใฏใšใชใฎใง็„ก่ฆ– + continue + } + + oldDatasetSchemaMap[oldds.ID()] = oldds + datasetSchemaMapNewOld[newds.ID()] = oldds.ID() + datasetSchemaMapOldNew[oldds.ID()] = newds.ID() + + // ใƒ•ใ‚ฃใƒผใƒซใƒ‰ใฎๅทฎๅˆ†ใ‚’ๅ–ใ‚‹ + fieldDiff := oldds.FieldDiffBySource(newds) + for of, f := range fieldDiff.Replaced { + datasetSchemaFieldIDMap[of] = f.ID() + } + + // ๅคใ„DSใฎDใ‚’ๆŽขใ—ๅ‡บใ™ + olddl, _, err := srv.DatasetRepo.FindBySchema(ctx, oldds.ID(), nil) + if err != nil { + return MigrateDatasetResult{}, err + } + + // ๅ‰Š้™คๅฏพ่ฑกใซ่ฟฝๅŠ  + noLogerUsedDS = append(noLogerUsedDS, oldds.ID()) + for _, oldd := range olddl { + noLogerUsedD = append(noLogerUsedD, oldd.ID()) + } + + // ๆ–ฐใ—ใ„DSใฎDใฎใฟๆŠฝๅ‡บ + currentNewdl := newdl.FilterByDatasetSchema(newds.ID()) + + // ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฎๅทฎๅˆ†ใ‚’ใจใ‚‹ + diff := dataset.List(olddl).DiffBySource(currentNewdl) + datasetDiffMap[newds.ID()] = diff + for od, d := range diff.Others { + datasetMapOldNew[od] = d + datasetIDMapOldNew[od] = d.ID() + newDatasetMap[d.ID()] = d + } + } + + // ใƒ—ใƒญใƒ‘ใƒ†ใ‚ฃใฎใƒžใ‚คใ‚ฐใƒฌใƒผใ‚ทใƒงใƒณ + propeties, err := srv.PropertyRepo.FindLinkedAll(ctx, sid) + if err != nil { + return MigrateDatasetResult{}, err + } + for _, p := range propeties { + p.MigrateDataset(property.DatasetMigrationParam{ + OldDatasetSchemaMap: datasetSchemaMapOldNew, + OldDatasetMap: datasetIDMapOldNew, + DatasetFieldIDMap: datasetSchemaFieldIDMap, + NewDatasetSchemaMap: newDatasetSchemaMap, + 
NewDatasetMap: newDatasetMap, + }) + } + result.Properties = propeties.Map() + + // ๆ–ฐใ—ใ„DSใงใƒซใƒผใƒ— + for _, newds := range newdsl { + oldds := oldDatasetSchemaMap[datasetSchemaMapNewOld[newds.ID()]] + if oldds == nil { + // ใƒชใƒณใ‚ฏใ•ใ‚Œใฆใ„ใ‚‹ใƒฌใ‚คใƒคใƒผใ‚„ใƒ—ใƒญใƒ‘ใƒ†ใ‚ฃใ‚‚ไฝœๆˆใ•ใ‚Œใฆใ„ใชใ„ใฏใšใชใฎใง็„ก่ฆ– + continue + } + diff, ok := datasetDiffMap[newds.ID()] + if !ok { + continue + } + + // ใƒฌใ‚คใƒคใƒผใฎใƒžใ‚คใ‚ฐใƒฌใƒผใ‚ทใƒงใƒณ + result2, err := srv.migrateLayer(ctx, sid, oldds, newds, diff) + if err != nil { + return MigrateDatasetResult{}, err + } + + result = result.Merge(result2) + } + + result.RemovedDatasetSchemas = append(result.RemovedDatasetSchemas, noLogerUsedDS...) + result.RemovedDatasets = append(result.RemovedDatasets, noLogerUsedD...) + return result, nil +} + +func (srv DatasetMigrator) migrateLayer(ctx context.Context, sid dataset.SceneID, oldds *dataset.Schema, newds *dataset.Schema, diff dataset.Diff) (MigrateDatasetResult, error) { + // ๅ‰ใฎใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚นใ‚ญใƒผใƒžใซ็ดใฅใ„ใŸใƒฌใ‚คใƒคใƒผใ‚ฐใƒซใƒผใƒ—ใ‚’ๅ–ๅพ— + layerGroups, err := srv.LayerRepo.FindGroupBySceneAndLinkedDatasetSchema(ctx, sid, oldds.ID()) + if err != nil { + return MigrateDatasetResult{}, err + } + + addedAndUpdatedLayers := layer.List{} + addedProperties := property.List{} + removedLayers := []layer.ID{} + + for _, lg := range layerGroups { + layers, err := srv.LayerRepo.FindByIDs(ctx, lg.Layers().Layers()) + if err != nil { + return MigrateDatasetResult{}, err + } + + // ใ‚นใ‚ญใƒผใƒžใŒๆถˆๆป…ใ—ใŸๅ ดๅˆ + if newds == nil { + // ใƒฌใ‚คใƒคใƒผใ‚ฐใƒซใƒผใƒ—่‡ชไฝ“ใ‚’ใ‚ขใƒณใƒชใƒณใ‚ฏ + lg.Unlink() + // ๅญใƒฌใ‚คใƒคใƒผใ‚’ๅ…จใฆๅ‰Š้™ค + for _, l := range layers { + if l == nil { + continue + } + lid := (*l).ID() + removedLayers = append(removedLayers, lid) + } + lg.Layers().Empty() + continue + } + + // ใƒฌใ‚คใƒคใƒผใ‚ฐใƒซใƒผใƒ—ใฎใƒชใƒณใ‚ฏๅผตใ‚Šๆ›ฟใˆใจๅๅ‰ๅค‰ๆ›ด + lg.Link(newds.ID()) + if lg.Name() == oldds.Name() { + 
lg.Rename(newds.Name()) + } + + // ๆถˆใˆใŸใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆโ†’ใƒฌใ‚คใƒคใƒผใ‚’ๅ‰Š้™ค + for _, d := range diff.Removed { + if l := layers.FindByDataset(d.ID()); l != nil { + lg.Layers().RemoveLayer(l.ID()) + removedLayers = append(removedLayers, l.ID()) + } + } + + // ่ฟฝๅŠ ใ•ใ‚ŒใŸใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆโ†’ใƒฌใ‚คใƒคใƒผใ‚’ไฝœๆˆใ—ใฆ่ฟฝๅŠ  + if len(diff.Added) > 0 { + // ใƒ—ใƒฉใ‚ฐใ‚คใƒณใ‚’ๅ–ๅพ— + var plug *plugin.Plugin + if pid := lg.Plugin(); pid != nil { + plug2, err := srv.Plugin(ctx, []plugin.ID{*pid}) + if err != nil || len(plug2) < 1 { + return MigrateDatasetResult{}, err + } + plug = plug2[0] + } + + representativeFieldID := newds.RepresentativeFieldID() + for _, added := range diff.Added { + did := added.ID() + + name := "" + if rf := added.FieldRef(representativeFieldID); rf != nil && rf.Type() == dataset.ValueTypeString { + name = rf.Value().Value().(string) + } + + layerItem, property, err := layerops.LayerItem{ + SceneID: sid, + ParentLayerID: lg.ID(), + LinkedDatasetID: &did, + Plugin: plug, + ExtensionID: lg.Extension(), + Name: name, + }.Initialize() + if err != nil { + return MigrateDatasetResult{}, err + } + + var l layer.Layer = layerItem + lg.Layers().AddLayer(layerItem.ID(), -1) + addedAndUpdatedLayers = append(addedAndUpdatedLayers, &l) + addedProperties = append(addedProperties, property) + } + } + + // ๆฎ‹ใ‚Šใฎใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆโ†’ใƒฌใ‚คใƒคใƒผใฎใƒชใƒณใ‚ฏใ‚’ๅผตใ‚Šๆ›ฟใˆ + for olddsid, newds := range diff.Others { + if il := layers.FindByDataset(olddsid); il != nil { + var il2 layer.Layer = il + il.Link(newds.ID()) + addedAndUpdatedLayers = append(addedAndUpdatedLayers, &il2) + } + } + } + + layers := append( + addedAndUpdatedLayers, + layerGroups.ToLayerList()..., + ) + + set := layer.NewIDSet() + set.Add(removedLayers...) 
+ + return MigrateDatasetResult{ + Layers: layers.Map(), + Properties: addedProperties.Map(), + RemovedLayers: set, + }, nil +} diff --git a/server/pkg/scene/sceneops/plugin_installer.go b/server/pkg/scene/sceneops/plugin_installer.go new file mode 100644 index 000000000..936c8d947 --- /dev/null +++ b/server/pkg/scene/sceneops/plugin_installer.go @@ -0,0 +1,122 @@ +package sceneops + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/scene" +) + +type PluginInstaller struct { + // PluginRepo repo.Plugin + // PluginRepositoryRepo gateway.PluginRepository + // PropertySchemaRepo repo.PropertySchema +} + +func (s PluginInstaller) InstallPluginFromRepository(pluginID scene.PluginID) error { + return errors.New("not implemented") + + // manifest, err := s.PluginRepositoryRepo.Manifest(pluginID) + // if err != nil { + // return err + // } + + // // save + // if manifest.Schema != nil { + // err = s.PropertySchemaRepo.SaveAll(manifest.Schema) + // if err != nil { + // return err + // } + // } + + // for _, s := range manifest.ExtensionSchema { + // err = i.propertySchemaRepo.Save(&s) + // if err != nil { + // i.output.Upload(nil, rerror.ErrInternalBy(err)) + // return + // } + // } + + // err = i.pluginRepo.Save(plugin) + // if err != nil { + // i.output.Upload(nil, rerror.ErrInternalBy(err)) + // return + // } + + // // Download and extract plugin files to storage + // data, err := i.pluginRepositoryRepo.Data(inp.Name, inp.Version) + // if err != nil { + // i.output.Upload(nil, rerror.ErrInternalBy(err)) + // return + // } + + // _, err = i.fileRepo.UploadAndExtractPluginFiles(data, plugin) + // if err != nil { + // i.output.Upload(nil, rerror.ErrInternalBy(err)) + // return + // } + + // return nil + // } + + // // UploadPlugin _ + // func (s PluginInstaller) UploadPlugin(reader io.Reader) error { + // panic("not implemented") + + // manifest, err := s.PluginRepositoryRepo.Manifest(inp.Name, inp.Version) + // if err != nil { + // i.output.Upload(nil, err) 
+ // return + // } + + // // build plugin + // plugin, err := plugin.New(). + // NewID(). + // FromManifest(manifest). + // Developer(operator.User). + // PluginSeries(pluginSeries.ID()). + // CreatedAt(time.Now()). + // Public(inp.Public). + // Build() + // if err != nil { + // i.output.Upload(nil, rerror.ErrInternalBy(err)) + // return + // } + + // // save + // if manifest.Schema != nil { + // err = i.propertySchemaRepo.Save(manifest.Schema) + // if err != nil { + // i.output.Upload(nil, rerror.ErrInternalBy(err)) + // return + // } + // } + + // for _, s := range manifest.ExtensionSchema { + // err = i.propertySchemaRepo.Save(&s) + // if err != nil { + // i.output.Upload(nil, rerror.ErrInternalBy(err)) + // return + // } + // } + + // err = i.pluginRepo.Save(plugin) + // if err != nil { + // i.output.Upload(nil, rerror.ErrInternalBy(err)) + // return + // } + + // // Download and extract plugin files to storage + // data, err := i.pluginRepositoryRepo.Data(inp.Name, inp.Version) + // if err != nil { + // i.output.Upload(nil, rerror.ErrInternalBy(err)) + // return + // } + + // _, err = i.fileRepo.UploadAndExtractPluginFiles(data, plugin) + // if err != nil { + // i.output.Upload(nil, rerror.ErrInternalBy(err)) + // return + // } + + // return nil +} diff --git a/server/pkg/scene/sceneops/plugin_migrator.go b/server/pkg/scene/sceneops/plugin_migrator.go new file mode 100644 index 000000000..802ff72a4 --- /dev/null +++ b/server/pkg/scene/sceneops/plugin_migrator.go @@ -0,0 +1,242 @@ +package sceneops + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type PluginMigrator struct { + Property property.Loader + PropertySchema property.SchemaLoader + Dataset dataset.Loader + Layer 
layer.LoaderByScene + Plugin plugin.Loader +} + +type MigratePluginsResult struct { + Scene *scene.Scene + Layers layer.List + Properties []*property.Property + RemovedLayers []layer.ID + RemovedProperties []property.ID +} + +var ( + ErrPluginNotInstalled error = errors.New("plugin not installed") + ErrInvalidPlugins error = errors.New("invalid plugins") +) + +func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, oldPluginID, newPluginID plugin.ID) (MigratePluginsResult, error) { + if s == nil { + return MigratePluginsResult{}, rerror.ErrInternalBy(errors.New("scene is nil")) + } + + if oldPluginID.Equal(newPluginID) || !oldPluginID.NameEqual(newPluginID) { + return MigratePluginsResult{}, ErrInvalidPlugins + } + + if !sc.Plugins().Has(oldPluginID) { + return MigratePluginsResult{}, ErrPluginNotInstalled + } + + plugins, err := s.Plugin(ctx, []plugin.ID{oldPluginID, newPluginID}) + if err != nil || len(plugins) < 2 { + return MigratePluginsResult{}, ErrInvalidPlugins + } + + oldPlugin := plugins[0] + newPlugin := plugins[1] + + // ๅ…จใƒฌใ‚คใƒคใƒผใฎๅ–ๅพ— + layers, err := s.Layer(ctx, sc.ID()) + if err != nil { + return MigratePluginsResult{}, err + } + + modifiedLayers := layer.List{} + removedLayers := []layer.ID{} + propertyIDs := []property.ID{} + removedPropertyIDs := []property.ID{} + schemaMap := map[property.SchemaID]*property.Schema{} + + // ใƒ—ใƒญใƒ‘ใƒ†ใ‚ฃใ‚นใ‚ญใƒผใƒžใฎๅ–ๅพ—ใจใ€ๅคใ„ใ‚นใ‚ญใƒผใƒžใจๆ–ฐใ—ใ„ใ‚นใ‚ญใƒผใƒžใฎใƒžใƒƒใƒ—ไฝœๆˆ + schemaIDs := []property.SchemaID{} + if oldPlugin.Schema() != nil { + if pps := newPlugin.Schema(); pps != nil { + schemaIDs = append(schemaIDs, *pps) + } + } + for _, e := range newPlugin.Extensions() { + schemaIDs = append(schemaIDs, e.Schema()) + } + schemas, err := s.PropertySchema(ctx, schemaIDs...) 
+ if err != nil { + return MigratePluginsResult{}, err + } + if oops := oldPlugin.Schema(); oops != nil { + if pps := newPlugin.Schema(); pps != nil { + for _, s := range schemas { + if s.ID() == *pps { + schemaMap[*oops] = s + } + } + } + } + for _, e := range oldPlugin.Extensions() { + if ne := newPlugin.Extension(e.ID()); ne != nil { + for _, s := range schemas { + if s.ID() == ne.Schema() { + schemaMap[e.Schema()] = s + } + } + } + } + + // ใ‚ทใƒผใƒณใฎใƒ—ใƒฉใ‚ฐใ‚คใƒณ + sc.Plugins().Upgrade(oldPluginID, newPluginID, nil, false) + for _, sp := range sc.Plugins().Plugins() { + if sp.Plugin().Equal(newPluginID) && sp.Property() != nil { + propertyIDs = append(propertyIDs, *sp.Property()) + } + } + + // ใ‚ทใƒผใƒณใฎใ‚ฆใ‚ฃใ‚ธใ‚งใƒƒใƒˆ + sc.Widgets().UpgradePlugin(oldPluginID, newPluginID) + for _, w := range sc.Widgets().Widgets() { + if w.Plugin().Equal(newPluginID) { + if newPlugin.Extension(w.Extension()) == nil { + sc.Widgets().RemoveAllByPlugin(oldPluginID, w.Extension().Ref()) + } else { + propertyIDs = append(propertyIDs, w.Property()) + } + } + } + + // ใƒฌใ‚คใƒคใƒผ + for _, l := range layers { + if l == nil { + continue + } + ll := *l + llp := ll.Plugin() + lle := ll.Extension() + + // ไธๆญฃใชใƒฌใ‚คใƒคใƒผใฎๆคœๅ‡บ + if llp != nil && lle != nil && (*llp).Equal(oldPluginID) { + if newPlugin.Extension(*lle) == nil { + // ๅ‰Š้™ค + removedLayers = append(removedLayers, ll.ID()) + if p := ll.Property(); p != nil { + removedPropertyIDs = append(removedPropertyIDs, *p) + } + if ib := ll.Infobox(); ib != nil { + removedPropertyIDs = append(removedPropertyIDs, ib.Property()) + for _, f := range ib.Fields() { + removedPropertyIDs = append(removedPropertyIDs, f.Property()) + } + } + continue + } + } + + if p := ll.Property(); p != nil { + propertyIDs = append(propertyIDs, *p) + } + + // ไธๆญฃใชInfoboxFieldใฎๅ‰Š้™ค + if ib := ll.Infobox(); ib != nil { + removeFields := []layer.InfoboxFieldID{} + for _, f := range ib.Fields() { + if 
newPlugin.Extension(f.Extension()) == nil { + removeFields = append(removeFields, f.ID()) + removedPropertyIDs = append(removedPropertyIDs, f.Property()) + } else { + propertyIDs = append(propertyIDs, f.Property()) + } + } + for _, f := range removeFields { + ib.Remove(f) + } + } + + ll.SetPlugin(&newPluginID) + modifiedLayers = append(modifiedLayers, l) + } + + // ไธๆญฃใชใƒฌใ‚คใƒคใƒผใฎใ‚ฐใƒซใƒผใƒ—ใ‹ใ‚‰ใฎๅ‰Š้™ค + for _, lg := range layers.ToLayerGroupList() { + modified := false + canceled := false + for _, l := range removedLayers { + if l == lg.ID() { + canceled = true + break + } + if lg.Layers().HasLayer(l) { + lg.Layers().RemoveLayer(l) + modified = true + } + } + if canceled { + continue + } + if modified { + already := false + for _, l := range modifiedLayers { + if l != nil && (*l).ID() == lg.ID() { + already = true + break + } + } + if already { + continue + } + var lg2 layer.Layer = lg + modifiedLayers = append(modifiedLayers, &lg2) + } + } + + // ใƒ—ใƒญใƒ‘ใƒ†ใ‚ฃใฎๅ–ๅพ— + properties, err := s.Property(ctx, propertyIDs...) + if err != nil { + return MigratePluginsResult{}, err + } + + // ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฎๅ–ๅพ— + datasetIDs := collectDatasetIDs(properties) + datasets, err := s.Dataset(ctx, datasetIDs...) + if err != nil { + return MigratePluginsResult{}, err + } + datasetLoader := datasets.Map().Loader() + + // ใƒ—ใƒญใƒ‘ใƒ†ใ‚ฃใฎ็งป่กŒไฝœๆฅญ + for _, p := range properties { + if schema := schemaMap[p.Schema()]; schema != nil { + p.MigrateSchema(ctx, schema, datasetLoader) + } + } + + return MigratePluginsResult{ + Scene: sc, + Layers: modifiedLayers, + Properties: properties, + RemovedLayers: removedLayers, + RemovedProperties: removedPropertyIDs, + }, nil +} + +func collectDatasetIDs(properties []*property.Property) []property.DatasetID { + res := []property.DatasetID{} + for _, p := range properties { + res = append(res, p.Datasets()...) 
+ } + return res +} diff --git a/server/pkg/scene/widget.go b/server/pkg/scene/widget.go new file mode 100644 index 000000000..3a1cb7bc4 --- /dev/null +++ b/server/pkg/scene/widget.go @@ -0,0 +1,98 @@ +package scene + +type Widget struct { + id WidgetID + plugin PluginID + extension PluginExtensionID + property PropertyID + enabled bool + extended bool +} + +func NewWidget(wid WidgetID, plugin PluginID, extension PluginExtensionID, property PropertyID, enabled, extended bool) (*Widget, error) { + if !plugin.Validate() || string(extension) == "" || property.IsNil() { + return nil, ErrInvalidID + } + + return &Widget{ + id: wid, + plugin: plugin, + extension: extension, + property: property, + enabled: enabled, + extended: extended, + }, nil +} + +func MustWidget(wid WidgetID, plugin PluginID, extension PluginExtensionID, property PropertyID, enabled bool, extended bool) *Widget { + w, err := NewWidget(wid, plugin, extension, property, enabled, extended) + if err != nil { + panic(err) + } + return w +} + +func (w *Widget) ID() WidgetID { + return w.id +} + +func (w *Widget) Plugin() PluginID { + return w.plugin +} + +func (w *Widget) Extension() PluginExtensionID { + return w.extension +} + +func (w *Widget) Property() PropertyID { + return w.property +} + +func (w *Widget) Enabled() bool { + if w == nil { + return false + } + return w.enabled +} + +func (w *Widget) Extended() bool { + if w == nil { + return false + } + return w.extended +} + +func (w *Widget) SetEnabled(enabled bool) { + if w == nil { + return + } + w.enabled = enabled +} + +func (w *Widget) SetExtended(extended bool) { + if w == nil { + return + } + w.extended = extended +} + +func (w *Widget) Clone() *Widget { + if w == nil { + return nil + } + return &Widget{ + id: w.id, + plugin: w.plugin.Clone(), + extension: w.extension, + property: w.property, + enabled: w.enabled, + extended: w.extended, + } +} + +func (w *Widget) SetPlugin(pid PluginID) { + if w == nil || pid.IsNil() { + return + } + 
w.plugin = pid.Clone() +} diff --git a/server/pkg/scene/widget_align_system.go b/server/pkg/scene/widget_align_system.go new file mode 100644 index 000000000..c87ef78d9 --- /dev/null +++ b/server/pkg/scene/widget_align_system.go @@ -0,0 +1,118 @@ +package scene + +type WidgetLocation struct { + Zone WidgetZoneType + Section WidgetSectionType + Area WidgetAreaType +} + +func (l WidgetLocation) Horizontal() bool { + return l.Section == WidgetSectionCenter +} + +func (l WidgetLocation) Vertical() bool { + return l.Area == WidgetAreaMiddle +} + +// WidgetAlignSystem is the layout structure of any enabled widgets that will be displayed over the scene. +type WidgetAlignSystem struct { + inner *WidgetZone + outer *WidgetZone +} + +type WidgetZoneType string + +const ( + WidgetZoneInner WidgetZoneType = "inner" + WidgetZoneOuter WidgetZoneType = "outer" +) + +// NewWidgetAlignSystem returns a new widget align system. +func NewWidgetAlignSystem() *WidgetAlignSystem { + return &WidgetAlignSystem{} +} + +// Zone will return a specific zone in the align system. +func (was *WidgetAlignSystem) Zone(zone WidgetZoneType) *WidgetZone { + if was == nil { + return nil + } + switch zone { + case WidgetZoneInner: + if was.inner == nil { + was.inner = NewWidgetZone() + } + return was.inner + case WidgetZoneOuter: + if was.outer == nil { + was.outer = NewWidgetZone() + } + return was.outer + } + return nil +} + +// Remove a widget from the align system. 
+func (was *WidgetAlignSystem) Remove(wid WidgetID) { + if was == nil { + return + } + + was.inner.Remove(wid) + was.outer.Remove(wid) +} + +func (was *WidgetAlignSystem) Area(loc WidgetLocation) *WidgetArea { + return was.Zone(loc.Zone).Section(loc.Section).Area(loc.Area) +} + +func (was *WidgetAlignSystem) Find(wid WidgetID) (int, WidgetLocation) { + if was == nil { + return -1, WidgetLocation{} + } + + if i, section, area := was.inner.Find(wid); i >= 0 { + return i, WidgetLocation{ + Zone: WidgetZoneInner, + Section: section, + Area: area, + } + } + if i, section, area := was.outer.Find(wid); i >= 0 { + return i, WidgetLocation{ + Zone: WidgetZoneOuter, + Section: section, + Area: area, + } + } + + return -1, WidgetLocation{} +} + +func (was *WidgetAlignSystem) Move(wid WidgetID, location WidgetLocation, index int) { + if was == nil { + return + } + + if i, loc := was.Find(wid); i < 0 { + return + } else if loc != location { + was.Area(loc).Remove(wid) + was.Area(location).Add(wid, index) + } else { + was.Area(location).Move(i, index) + } +} + +func (w *WidgetAlignSystem) SetZone(t WidgetZoneType, z *WidgetZone) { + if w == nil { + return + } + + switch t { + case WidgetZoneInner: + w.inner = z + case WidgetZoneOuter: + w.outer = z + } +} diff --git a/server/pkg/scene/widget_align_system_test.go b/server/pkg/scene/widget_align_system_test.go new file mode 100644 index 000000000..8050c9e78 --- /dev/null +++ b/server/pkg/scene/widget_align_system_test.go @@ -0,0 +1,335 @@ +package scene + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewWidgetAlignSystem(t *testing.T) { + assert.Equal(t, &WidgetAlignSystem{}, NewWidgetAlignSystem()) +} + +func TestWidgetAlignSystem_Zone(t *testing.T) { + was := NewWidgetAlignSystem() + assert.Same(t, was.inner, was.Zone(WidgetZoneInner)) + assert.NotNil(t, was.inner) + assert.Same(t, was.outer, was.Zone(WidgetZoneOuter)) + assert.NotNil(t, was.outer) +} + +func TestWidgetAlignSystem_Area(t 
*testing.T) { + was := NewWidgetAlignSystem() + assert.Same(t, was.inner.right.middle, was.Area(WidgetLocation{ + Zone: WidgetZoneInner, + Section: WidgetSectionRight, + Area: WidgetAreaMiddle, + })) +} + +func TestWidgetAlignSystem_Find(t *testing.T) { + wid1 := NewWidgetID() + wid2 := NewWidgetID() + wid3 := NewWidgetID() + wid4 := NewWidgetID() + wid5 := NewWidgetID() + + tests := []struct { + Name string + Input WidgetID + Expected1 int + Expected2 WidgetLocation + Nil bool + }{ + { + Name: "inner", + Input: wid2, + Expected1: 1, + Expected2: WidgetLocation{Zone: WidgetZoneInner, Section: WidgetSectionLeft, Area: WidgetAreaTop}, + }, + { + Name: "outer", + Input: wid4, + Expected1: 0, + Expected2: WidgetLocation{Zone: WidgetZoneOuter, Section: WidgetSectionLeft, Area: WidgetAreaTop}, + }, + { + Name: "invalid id", + Input: NewWidgetID(), + Expected1: -1, + Expected2: WidgetLocation{}, + }, + { + Name: "Return nil if no widget section", + Input: wid1, + Nil: true, + Expected1: -1, + Expected2: WidgetLocation{}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + if tc.Nil { + index, location := (*WidgetAlignSystem)(nil).Find(tc.Input) + assert.Equal(t, tc.Expected1, index) + assert.Equal(t, tc.Expected2, location) + return + } + + was := NewWidgetAlignSystem() + was.Zone(WidgetZoneInner).Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll(WidgetIDList{wid1, wid2, wid3}) + was.Zone(WidgetZoneOuter).Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll(WidgetIDList{wid4, wid5}) + + index, location := was.Find(tc.Input) + assert.Equal(t, tc.Expected1, index) + assert.Equal(t, tc.Expected2, location) + }) + } +} + +func TestWidgetAlignSystem_Remove(t *testing.T) { + wid := NewWidgetID() + + tests := []struct { + Name string + Zone WidgetZoneType + Input WidgetID + Expected WidgetIDList + Nil bool + }{ + { + Name: "inner: remove a widget from widget section", + Zone: WidgetZoneInner, + Input: wid, + Expected: 
WidgetIDList{}, + }, + { + Name: "inner: couldn't find widgetId", + Zone: WidgetZoneInner, + Input: NewWidgetID(), + Expected: WidgetIDList{wid}, + }, + { + Name: "outer: remove a widget from widget section", + Zone: WidgetZoneOuter, + Input: wid, + Expected: WidgetIDList{}, + }, + { + Name: "outer: couldn't find widgetId", + Zone: WidgetZoneOuter, + Input: NewWidgetID(), + Expected: WidgetIDList{wid}, + }, + { + Name: "nil", + Zone: WidgetZoneInner, + Input: wid, + Nil: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + if tc.Nil { + (*WidgetZone)(nil).Remove(tc.Input) + return + } + + ws := NewWidgetAlignSystem() + ws.Zone(tc.Zone).Section(WidgetSectionLeft).Area(WidgetAreaTop).Add(wid, -1) + ws.Remove(tc.Input) + assert.Equal(t, tc.Expected, ws.Zone(tc.Zone).Section(WidgetSectionLeft).Area(WidgetAreaTop).WidgetIDs()) + }) + } +} + +func TestWidgetAlignSystem_Move(t *testing.T) { + wid1 := NewWidgetID() + wid2 := NewWidgetID() + wid3 := NewWidgetID() + wid4 := NewWidgetID() + wid5 := NewWidgetID() + + tests := []struct { + Name string + Input1 WidgetID + Input2 WidgetLocation + Input3 int + Source WidgetLocation + ExpectedSource WidgetIDList + ExpectedDest WidgetIDList + Nil bool + }{ + { + Name: "move a widget in the same area with positive index", + Input1: wid1, + Input2: WidgetLocation{ + Zone: WidgetZoneInner, + Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }, + Input3: 1, + Source: WidgetLocation{ + Zone: WidgetZoneInner, + Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }, + ExpectedSource: WidgetIDList{wid2, wid1, wid3}, + ExpectedDest: WidgetIDList{wid2, wid1, wid3}, + }, + { + Name: "move a widget in the same area with negative index", + Input1: wid1, + Input2: WidgetLocation{ + Zone: WidgetZoneInner, + Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }, + Input3: -1, + Source: WidgetLocation{ + Zone: WidgetZoneInner, + Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }, 
+ ExpectedSource: WidgetIDList{wid2, wid3, wid1}, + ExpectedDest: WidgetIDList{wid2, wid3, wid1}, + }, + { + Name: "move a widget to a different area with positive index", + Input1: wid1, + Input2: WidgetLocation{ + Zone: WidgetZoneInner, + Section: WidgetSectionLeft, + Area: WidgetAreaBottom, + }, + Input3: 1, + Source: WidgetLocation{ + Zone: WidgetZoneOuter, + Section: WidgetSectionRight, + Area: WidgetAreaTop, + }, + ExpectedSource: WidgetIDList{wid2, wid3}, + ExpectedDest: WidgetIDList{wid4, wid1, wid5}, + }, + { + Name: "move a widget to a different area with negative index", + Input1: wid1, + Input2: WidgetLocation{ + Zone: WidgetZoneInner, + Section: WidgetSectionLeft, + Area: WidgetAreaBottom, + }, + Input3: -1, + Source: WidgetLocation{ + Zone: WidgetZoneOuter, + Section: WidgetSectionCenter, + Area: WidgetAreaMiddle, + }, + ExpectedSource: WidgetIDList{wid2, wid3}, + ExpectedDest: WidgetIDList{wid4, wid5, wid1}, + }, + { + Name: "nil", + Nil: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + if tc.Nil { + (*WidgetAlignSystem)(nil).Move(tc.Input1, tc.Input2, tc.Input3) + return + } + + ws := NewWidgetAlignSystem() + ws.Area(tc.Source).AddAll(WidgetIDList{wid1, wid2, wid3}) + if tc.Source != tc.Input2 { + ws.Area(tc.Input2).AddAll(WidgetIDList{wid4, wid5}) + } + + ws.Move(tc.Input1, tc.Input2, tc.Input3) + + assert.Equal(t, tc.ExpectedSource, ws.Area(tc.Source).WidgetIDs()) + assert.Equal(t, tc.ExpectedDest, ws.Area(tc.Input2).WidgetIDs()) + }) + } +} + +func TestWidgetAlignSystem_SetZone(t *testing.T) { + type args struct { + t WidgetZoneType + z *WidgetZone + } + + tests := []struct { + name string + args args + nil bool + }{ + { + name: "inner", + args: args{ + t: WidgetZoneInner, + z: &WidgetZone{}, + }, + }, + { + name: "outer", + args: args{ + t: WidgetZoneOuter, + z: &WidgetZone{}, + }, + }, + { + name: "nil area", + args: args{ + t: WidgetZoneInner, + z: nil, + }, + }, + { + name: 
"nil", + args: args{ + t: WidgetZoneInner, + z: &WidgetZone{}, + }, + nil: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + var w *WidgetAlignSystem + if !tt.nil { + w = &WidgetAlignSystem{} + } + + w.SetZone(tt.args.t, tt.args.z) + + if !tt.nil { + var z2 *WidgetZone + switch tt.args.t { + case WidgetZoneInner: + z2 = w.inner + case WidgetZoneOuter: + z2 = w.outer + } + assert.Same(t, tt.args.z, z2) + } + }) + } +} diff --git a/server/pkg/scene/widget_area.go b/server/pkg/scene/widget_area.go new file mode 100644 index 000000000..fe69840be --- /dev/null +++ b/server/pkg/scene/widget_area.go @@ -0,0 +1,107 @@ +package scene + +import ( + "github.com/samber/lo" +) + +// WidgetArea has the widgets and alignment information found in each part area of a section. +type WidgetArea struct { + widgetIds WidgetIDList + align WidgetAlignType +} + +type WidgetAlignType string + +const ( + WidgetAlignStart WidgetAlignType = "start" + WidgetAlignCentered WidgetAlignType = "centered" + WidgetAlignEnd WidgetAlignType = "end" +) + +func NewWidgetArea(widgetIds []WidgetID, align WidgetAlignType) *WidgetArea { + wa := &WidgetArea{} + wa.AddAll(widgetIds) + wa.SetAlignment(align) + return wa +} + +// WidgetIds will return a slice of widget ids from a specific area. +func (a *WidgetArea) WidgetIDs() WidgetIDList { + if a == nil { + return nil + } + + return a.widgetIds.Clone() +} + +// Alignment will return the alignment of a specific area. 
+func (a *WidgetArea) Alignment() WidgetAlignType { + if a == nil { + return "" + } + + return a.align +} + +func (a *WidgetArea) Find(wid WidgetID) int { + if a == nil { + return -1 + } + return lo.IndexOf(a.widgetIds, wid) +} + +func (a *WidgetArea) Add(wid WidgetID, index int) { + if a == nil || a.widgetIds.Has(wid) { + return + } + + if i := a.widgetIds.Index(wid); i >= 0 { + a.widgetIds = a.widgetIds.DeleteAt(i) + if i < index { + index-- + } + } + a.widgetIds = a.widgetIds.Insert(index, wid) +} + +func (a *WidgetArea) AddAll(wids []WidgetID) { + if a == nil { + return + } + + a.widgetIds = a.widgetIds.AddUniq(wids...) +} + +func (a *WidgetArea) SetAlignment(at WidgetAlignType) { + if a == nil { + return + } + + if at == WidgetAlignStart || at == WidgetAlignCentered || at == WidgetAlignEnd { + a.align = at + } else { + a.align = WidgetAlignStart + } +} + +func (a *WidgetArea) Remove(wid WidgetID) { + if a == nil { + return + } + + for i, w := range a.widgetIds { + if w == wid { + a.widgetIds = a.widgetIds.DeleteAt(i) + return + } + } +} + +func (a *WidgetArea) Move(from, to int) { + if a == nil { + return + } + + wid := a.widgetIds[from] + a.widgetIds = a.widgetIds.DeleteAt(from).Insert(to, wid) +} diff --git a/server/pkg/scene/widget_area_test.go b/server/pkg/scene/widget_area_test.go new file mode 100644 index 000000000..fa65c3d80 --- /dev/null +++ b/server/pkg/scene/widget_area_test.go @@ -0,0 +1,327 @@ +package scene + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestWidgetArea(t *testing.T) { + wid1 := NewWidgetID() + wid2 := NewWidgetID() + + tests := []struct { + Name string + Input1 WidgetIDList + Input2 WidgetAlignType + Expected *WidgetArea + }{ + { + Name: "New widget area with proper widget ids and widget align type", + Input1: WidgetIDList{wid1, wid2}, + Input2: WidgetAlignEnd, + Expected: &WidgetArea{widgetIds: WidgetIDList{wid1, wid2}, align: WidgetAlignEnd}, + }, + { + Name: "New widget area with duplicated widget 
ids", + Input1: WidgetIDList{wid1}, + Input2: WidgetAlignEnd, + Expected: &WidgetArea{widgetIds: WidgetIDList{wid1}, align: WidgetAlignEnd}, + }, + { + Name: "New widget area with wrong widget align type", + Input1: WidgetIDList{wid1, wid2}, + Input2: "wrong", + Expected: &WidgetArea{widgetIds: WidgetIDList{wid1, wid2}, align: WidgetAlignStart}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + wa := NewWidgetArea(tc.Input1, tc.Input2) + assert.Equal(t, tc.Expected, wa) + }) + } +} + +func TestWidgetArea_WidgetIDs(t *testing.T) { + wid := NewWidgetID() + wa := NewWidgetArea(WidgetIDList{wid}, WidgetAlignStart) + assert.Equal(t, wa.widgetIds, wa.WidgetIDs()) + assert.Nil(t, (*WidgetArea)(nil).WidgetIDs()) +} + +func TestWidgetArea_Alignment(t *testing.T) { + wa := NewWidgetArea(nil, WidgetAlignEnd) + assert.Equal(t, WidgetAlignEnd, wa.Alignment()) + assert.Equal(t, WidgetAlignType(""), (*WidgetArea)(nil).Alignment()) +} + +func TestWidgetArea_Find(t *testing.T) { + wid := NewWidgetID() + wid2 := NewWidgetID() + + tests := []struct { + Name string + Input WidgetID + Expected int + Nil bool + }{ + { + Name: "Return index if contains widget id", + Input: wid, + Expected: 0, + }, + { + Name: "Return -1 if doesn't contain widget id", + Input: wid2, + Expected: -1, + }, + { + Name: "Return nil if WidgetArea is nil", + Nil: true, + Expected: -1, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + var wa *WidgetArea + if !tc.Nil { + wa = NewWidgetArea(WidgetIDList{wid}, WidgetAlignStart) + } + assert.Equal(t, tc.Expected, wa.Find(tc.Input)) + }) + } +} + +func TestWidgetArea_Add(t *testing.T) { + wid1 := NewWidgetID() + wid2 := NewWidgetID() + wid3 := NewWidgetID() + + tests := []struct { + Name string + Nil bool + Input WidgetID + Input2 int + Expected WidgetIDList + }{ + { + Name: "add a widget id", + Input: wid3, + Input2: -1, + Expected: WidgetIDList{wid1, 
wid2, wid3}, + }, + { + Name: "add a widget id but already exists", + Input: wid1, + Input2: -1, + Expected: WidgetIDList{wid1, wid2}, + }, + { + Name: "insert a widget id", + Input: wid3, + Input2: 1, + Expected: WidgetIDList{wid1, wid3, wid2}, + }, + { + Name: "nil widget area", + Nil: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + if tc.Nil { + (*WidgetArea)(nil).Add(wid1, -1) + return + } + + wa := NewWidgetArea(WidgetIDList{wid1, wid2}, WidgetAlignStart) + wa.Add(tc.Input, tc.Input2) + assert.Equal(t, tc.Expected, wa.WidgetIDs()) + }) + } +} + +func TestWidgetArea_AddAll(t *testing.T) { + wid1 := NewWidgetID() + wid2 := NewWidgetID() + + tests := []struct { + Name string + Nil bool + Input WidgetIDList + Expected WidgetIDList + }{ + { + Name: "add widget ids", + Input: WidgetIDList{wid1, wid2}, + Expected: WidgetIDList{wid1, wid2}, + }, + { + Name: "add widget ids but duplicated", + Input: WidgetIDList{wid1, wid1, wid2}, + Expected: WidgetIDList{wid1, wid2}, + }, + { + Name: "nil widget area", + Nil: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + if tc.Nil { + (*WidgetArea)(nil).AddAll(nil) + return + } + + wa := NewWidgetArea(nil, WidgetAlignStart) + wa.AddAll(tc.Input) + assert.Equal(t, tc.Expected, wa.WidgetIDs()) + }) + } +} + +func TestWidgetArea_SetAlignment(t *testing.T) { + tests := []struct { + Name string + Nil bool + Input WidgetAlignType + Expected WidgetAlignType + }{ + { + Name: "set alignment", + Input: WidgetAlignEnd, + Expected: WidgetAlignEnd, + }, + { + Name: "set alignment with wrong alignment", + Input: "wrong", + Expected: WidgetAlignStart, + }, + { + Name: "set alignment when widget area is nil", + Nil: true, + Input: WidgetAlignStart, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + var wa *WidgetArea + if !tc.Nil { + wa = NewWidgetArea(nil, 
WidgetAlignStart) + } + wa.SetAlignment(tc.Input) + if !tc.Nil { + assert.Equal(t, tc.Expected, wa.align) + } + }) + } +} + +func TestWidgetArea_Remove(t *testing.T) { + wid := NewWidgetID() + + tests := []struct { + Name string + Input WidgetID + Expected WidgetIDList + Nil bool + }{ + { + Name: "Remove a widget from widget area", + Input: wid, + Expected: WidgetIDList{}, + }, + { + Name: "Remove a widget from widget area that doesn't exist", + Input: NewWidgetID(), + Expected: WidgetIDList{wid}, + }, + { + Name: "Return nil if no widget area", + Input: wid, + Nil: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + var wa *WidgetArea + if !tc.Nil { + wa = NewWidgetArea(WidgetIDList{wid}, "") + } + wa.Remove(tc.Input) + if !tc.Nil { + assert.Equal(t, tc.Expected, wa.widgetIds) + } + }) + } +} + +func TestWidgetArea_Move(t *testing.T) { + wid := NewWidgetID() + wid2 := NewWidgetID() + wid3 := NewWidgetID() + + tests := []struct { + Name string + Input1, Input2 int + Expected WidgetIDList + Nil bool + }{ + { + Name: "Move widget Id", + Input1: 1, + Input2: 2, + Expected: WidgetIDList{wid, wid3, wid2}, + }, + { + Name: "Move widget Id", + Input1: 2, + Input2: 0, + Expected: WidgetIDList{wid3, wid, wid2}, + }, + { + Name: "Nil", + Nil: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + var wa *WidgetArea + if !tc.Nil { + wa = NewWidgetArea(WidgetIDList{wid, wid2, wid3}, "") + } + wa.Move(tc.Input1, tc.Input2) + if !tc.Nil { + assert.Equal(t, tc.Expected, wa.widgetIds) + } + }) + } +} diff --git a/server/pkg/scene/widget_section.go b/server/pkg/scene/widget_section.go new file mode 100644 index 000000000..5a602e150 --- /dev/null +++ b/server/pkg/scene/widget_section.go @@ -0,0 +1,87 @@ +package scene + +// WidgetSection is the structure of each section of the align system. 
+type WidgetSection struct { + top *WidgetArea + middle *WidgetArea + bottom *WidgetArea +} + +type WidgetAreaType string + +var ( + WidgetAreaTop WidgetAreaType = "top" + WidgetAreaMiddle WidgetAreaType = "middle" + WidgetAreaBottom WidgetAreaType = "bottom" +) + +func NewWidgetSection() *WidgetSection { + return &WidgetSection{} +} + +func (s *WidgetSection) Area(t WidgetAreaType) *WidgetArea { + if s == nil { + return nil + } + + switch t { + case WidgetAreaTop: + if s.top == nil { + s.top = NewWidgetArea(nil, WidgetAlignStart) + } + return s.top + case WidgetAreaMiddle: + if s.middle == nil { + s.middle = NewWidgetArea(nil, WidgetAlignStart) + } + return s.middle + case WidgetAreaBottom: + if s.bottom == nil { + s.bottom = NewWidgetArea(nil, WidgetAlignStart) + } + return s.bottom + } + return nil +} + +func (s *WidgetSection) Find(wid WidgetID) (int, WidgetAreaType) { + if s == nil { + return -1, "" + } + + if i := s.top.Find(wid); i >= 0 { + return i, WidgetAreaTop + } + if i := s.middle.Find(wid); i >= 0 { + return i, WidgetAreaMiddle + } + if i := s.bottom.Find(wid); i >= 0 { + return i, WidgetAreaBottom + } + return -1, "" +} + +func (s *WidgetSection) Remove(wid WidgetID) { + if s == nil { + return + } + + s.top.Remove(wid) + s.middle.Remove(wid) + s.bottom.Remove(wid) +} + +func (s *WidgetSection) SetArea(t WidgetAreaType, a *WidgetArea) { + if s == nil { + return + } + + switch t { + case WidgetAreaTop: + s.top = a + case WidgetAreaMiddle: + s.middle = a + case WidgetAreaBottom: + s.bottom = a + } +} diff --git a/server/pkg/scene/widget_section_test.go b/server/pkg/scene/widget_section_test.go new file mode 100644 index 000000000..9531cd8f2 --- /dev/null +++ b/server/pkg/scene/widget_section_test.go @@ -0,0 +1,243 @@ +package scene + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewWidgetSection(t *testing.T) { + assert.Equal(t, &WidgetSection{}, NewWidgetSection()) +} + +func TestWidgetSection_Area(t *testing.T) { + ws 
:= NewWidgetSection() + assert.Same(t, ws.top, ws.Area(WidgetAreaTop)) + assert.NotNil(t, ws.top) + assert.Same(t, ws.middle, ws.Area(WidgetAreaMiddle)) + assert.NotNil(t, ws.middle) + assert.Same(t, ws.bottom, ws.Area(WidgetAreaBottom)) + assert.NotNil(t, ws.bottom) +} + +func TestWidgetSection_Find(t *testing.T) { + wid1 := NewWidgetID() + wid2 := NewWidgetID() + wid3 := NewWidgetID() + wid4 := NewWidgetID() + wid5 := NewWidgetID() + wid6 := NewWidgetID() + wid7 := NewWidgetID() + + tests := []struct { + Name string + Input WidgetID + Expected1 int + Expected2 WidgetAreaType + Nil bool + }{ + { + Name: "top", + Input: wid2, + Expected1: 1, + Expected2: WidgetAreaTop, + }, + { + Name: "middle", + Input: wid4, + Expected1: 0, + Expected2: WidgetAreaMiddle, + }, + { + Name: "bottom", + Input: wid7, + Expected1: 1, + Expected2: WidgetAreaBottom, + }, + { + Name: "invalid id", + Input: NewWidgetID(), + Expected1: -1, + Expected2: "", + }, + { + Name: "Return nil if no widget section", + Input: wid1, + Nil: true, + Expected1: -1, + Expected2: "", + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + if tc.Nil { + index, area := (*WidgetSection)(nil).Find(tc.Input) + assert.Equal(t, tc.Expected1, index) + assert.Equal(t, tc.Expected2, area) + return + } + + ws := NewWidgetSection() + ws.Area(WidgetAreaTop).AddAll(WidgetIDList{wid1, wid2, wid3}) + ws.Area(WidgetAreaMiddle).AddAll(WidgetIDList{wid4, wid5}) + ws.Area(WidgetAreaBottom).AddAll(WidgetIDList{wid6, wid7}) + + index, area := ws.Find(tc.Input) + assert.Equal(t, tc.Expected1, index) + assert.Equal(t, tc.Expected2, area) + }) + } +} + +func TestWidgetSection_Remove(t *testing.T) { + wid := NewWidgetID() + + tests := []struct { + Name string + Area WidgetAreaType + Input WidgetID + Expected WidgetIDList + Nil bool + }{ + { + Name: "top: remove a widget from widget section", + Area: WidgetAreaTop, + Input: wid, + Expected: WidgetIDList{}, + }, + { + Name: "top: 
couldn't find widgetId", + Area: WidgetAreaTop, + Input: NewWidgetID(), + Expected: WidgetIDList{wid}, + }, + { + Name: "middle: remove a widget from widget section", + Area: WidgetAreaMiddle, + Input: wid, + Expected: WidgetIDList{}, + }, + { + Name: "middle: couldn't find widgetId", + Area: WidgetAreaMiddle, + Input: NewWidgetID(), + Expected: WidgetIDList{wid}, + }, + { + Name: "bottom: remove a widget from widget section", + Area: WidgetAreaBottom, + Input: wid, + Expected: WidgetIDList{}, + }, + { + Name: "bottom: couldn't find widgetId", + Area: WidgetAreaBottom, + Input: NewWidgetID(), + Expected: WidgetIDList{wid}, + }, + { + Name: "nil", + Area: WidgetAreaTop, + Input: wid, + Nil: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + if tc.Nil { + (*WidgetSection)(nil).Remove(tc.Input) + return + } + + ws := NewWidgetSection() + ws.Area(tc.Area).Add(wid, -1) + ws.Remove(tc.Input) + assert.Equal(t, tc.Expected, ws.Area(tc.Area).WidgetIDs()) + }) + } +} + +func TestWidgetSection_SetArea(t *testing.T) { + type args struct { + t WidgetAreaType + a *WidgetArea + } + + tests := []struct { + name string + args args + nil bool + }{ + { + name: "top", + args: args{ + t: WidgetAreaTop, + a: &WidgetArea{}, + }, + }, + { + name: "middle", + args: args{ + t: WidgetAreaMiddle, + a: &WidgetArea{}, + }, + }, + { + name: "bottom", + args: args{ + t: WidgetAreaBottom, + a: &WidgetArea{}, + }, + }, + { + name: "nil area", + args: args{ + t: WidgetAreaTop, + a: nil, + }, + }, + { + name: "nil", + args: args{ + t: WidgetAreaTop, + a: &WidgetArea{}, + }, + nil: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + var s *WidgetSection + if !tt.nil { + s = &WidgetSection{} + } + + s.SetArea(tt.args.t, tt.args.a) + + if !tt.nil { + var a2 *WidgetArea + switch tt.args.t { + case WidgetAreaTop: + a2 = s.top + case WidgetAreaMiddle: + a2 = s.middle + case 
WidgetAreaBottom: + a2 = s.bottom + } + assert.Same(t, tt.args.a, a2) + } + }) + } +} diff --git a/server/pkg/scene/widget_test.go b/server/pkg/scene/widget_test.go new file mode 100644 index 000000000..48db36e66 --- /dev/null +++ b/server/pkg/scene/widget_test.go @@ -0,0 +1,142 @@ +package scene + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewWidget(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID() + wid := NewWidgetID() + + tests := []struct { + Name string + ID WidgetID + Plugin PluginID + Extension PluginExtensionID + Property PropertyID + Enabled bool + Extended bool + Err error + }{ + { + Name: "success new widget", + ID: wid, + Plugin: pid, + Extension: "eee", + Property: pr, + Enabled: true, + Extended: true, + Err: nil, + }, + { + Name: "fail empty extension", + ID: wid, + Plugin: pid, + Extension: "", + Property: pr, + Enabled: true, + Extended: false, + Err: ErrInvalidID, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res, err := NewWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled, tc.Extended) + if tc.Err == nil { + assert.Equal(t, tc.ID, res.ID()) + assert.Equal(t, tc.Property, res.Property()) + assert.Equal(t, tc.Extension, res.Extension()) + assert.Equal(t, tc.Enabled, res.Enabled()) + assert.Equal(t, tc.Extended, res.Extended()) + assert.Equal(t, tc.Plugin, res.Plugin()) + } else { + assert.ErrorIs(t, err, tc.Err) + } + }) + } +} + +func TestMustNewWidget(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID() + wid := NewWidgetID() + + tests := []struct { + Name string + ID WidgetID + Plugin PluginID + Extension PluginExtensionID + Property PropertyID + Enabled bool + Extended bool + Err error + }{ + { + Name: "success new widget", + ID: wid, + Plugin: pid, + Extension: "eee", + Property: pr, + Enabled: true, + Extended: true, + Err: nil, + }, + { + Name: "fail empty extension", + ID: wid, + Plugin: 
pid, + Extension: "", + Property: pr, + Enabled: true, + Extended: false, + Err: ErrInvalidID, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + if tc.Err != nil { + assert.PanicsWithError(t, tc.Err.Error(), func() { + MustWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled, tc.Extended) + }) + return + } + + res := MustWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled, tc.Extended) + assert.Equal(t, tc.ID, res.ID()) + assert.Equal(t, tc.Property, res.Property()) + assert.Equal(t, tc.Extension, res.Extension()) + assert.Equal(t, tc.Enabled, res.Enabled()) + assert.Equal(t, tc.Plugin, res.Plugin()) + }) + } +} + +func TestWidget_SetEnabled(t *testing.T) { + res := MustWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", NewPropertyID(), false, false) + res.SetEnabled(true) + assert.True(t, res.Enabled()) +} + +func TestWidget_SetExtended(t *testing.T) { + res := MustWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", NewPropertyID(), false, false) + res.SetExtended(true) + assert.True(t, res.Extended()) +} + +func TestWidget_Clone(t *testing.T) { + res := MustWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", NewPropertyID(), false, false) + res2 := res.Clone() + assert.Equal(t, res, res2) + assert.NotSame(t, res, res2) + assert.Nil(t, (*Widget)(nil).Clone()) +} diff --git a/server/pkg/scene/widget_zone.go b/server/pkg/scene/widget_zone.go new file mode 100644 index 000000000..a7936a9e5 --- /dev/null +++ b/server/pkg/scene/widget_zone.go @@ -0,0 +1,84 @@ +package scene + +// WidgetZone is the structure of each layer (inner and outer) of the align system. 
+type WidgetZone struct { + left *WidgetSection + center *WidgetSection + right *WidgetSection +} + +type WidgetSectionType string + +const ( + WidgetSectionLeft WidgetSectionType = "left" + WidgetSectionCenter WidgetSectionType = "center" + WidgetSectionRight WidgetSectionType = "right" +) + +func NewWidgetZone() *WidgetZone { + return &WidgetZone{} +} + +func (wz *WidgetZone) Section(s WidgetSectionType) *WidgetSection { + switch s { + case WidgetSectionLeft: + if wz.left == nil { + wz.left = NewWidgetSection() + } + return wz.left + case WidgetSectionCenter: + if wz.center == nil { + wz.center = NewWidgetSection() + } + return wz.center + case WidgetSectionRight: + if wz.right == nil { + wz.right = NewWidgetSection() + } + return wz.right + } + return nil +} + +func (z *WidgetZone) Remove(wid WidgetID) { + if z == nil { + return + } + + z.left.Remove(wid) + z.center.Remove(wid) + z.right.Remove(wid) +} + +func (z *WidgetZone) Find(wid WidgetID) (int, WidgetSectionType, WidgetAreaType) { + if z == nil { + return -1, "", "" + } + + if i, wa := z.left.Find(wid); i >= 0 { + return i, WidgetSectionLeft, wa + } + if i, wa := z.center.Find(wid); i >= 0 { + return i, WidgetSectionCenter, wa + } + if i, wa := z.right.Find(wid); i >= 0 { + return i, WidgetSectionRight, wa + } + + return -1, "", "" +} + +func (z *WidgetZone) SetSection(t WidgetSectionType, s *WidgetSection) { + if z == nil { + return + } + + switch t { + case WidgetSectionLeft: + z.left = s + case WidgetSectionCenter: + z.center = s + case WidgetSectionRight: + z.right = s + } +} diff --git a/server/pkg/scene/widget_zone_test.go b/server/pkg/scene/widget_zone_test.go new file mode 100644 index 000000000..d9ad579a1 --- /dev/null +++ b/server/pkg/scene/widget_zone_test.go @@ -0,0 +1,251 @@ +package scene + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewWidgetZone(t *testing.T) { + assert.Equal(t, &WidgetZone{}, NewWidgetZone()) +} + +func TestWidgetZone_Section(t *testing.T) 
{ + wz := NewWidgetZone() + assert.Same(t, wz.left, wz.Section(WidgetSectionLeft)) + assert.NotNil(t, wz.left) + assert.Same(t, wz.center, wz.Section(WidgetSectionCenter)) + assert.NotNil(t, wz.center) + assert.Same(t, wz.right, wz.Section(WidgetSectionRight)) + assert.NotNil(t, wz.right) +} + +func TestWidgetZone_Find(t *testing.T) { + wid1 := NewWidgetID() + wid2 := NewWidgetID() + wid3 := NewWidgetID() + wid4 := NewWidgetID() + wid5 := NewWidgetID() + wid6 := NewWidgetID() + wid7 := NewWidgetID() + + tests := []struct { + Name string + Input WidgetID + Expected1 int + Expected2 WidgetSectionType + Expected3 WidgetAreaType + Nil bool + }{ + { + Name: "left", + Input: wid2, + Expected1: 1, + Expected2: WidgetSectionLeft, + Expected3: WidgetAreaTop, + }, + { + Name: "center", + Input: wid4, + Expected1: 0, + Expected2: WidgetSectionCenter, + Expected3: WidgetAreaTop, + }, + { + Name: "right", + Input: wid7, + Expected1: 1, + Expected2: WidgetSectionRight, + Expected3: WidgetAreaTop, + }, + { + Name: "invalid id", + Input: NewWidgetID(), + Expected1: -1, + Expected2: "", + Expected3: "", + }, + { + Name: "Return nil if no widget section", + Input: wid1, + Nil: true, + Expected1: -1, + Expected2: "", + Expected3: "", + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + if tc.Nil { + index, section, area := (*WidgetZone)(nil).Find(tc.Input) + assert.Equal(t, tc.Expected1, index) + assert.Equal(t, tc.Expected2, section) + assert.Equal(t, tc.Expected3, area) + return + } + + ez := NewWidgetZone() + ez.Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll(WidgetIDList{wid1, wid2, wid3}) + ez.Section(WidgetSectionCenter).Area(WidgetAreaTop).AddAll(WidgetIDList{wid4, wid5}) + ez.Section(WidgetSectionRight).Area(WidgetAreaTop).AddAll(WidgetIDList{wid6, wid7}) + + index, section, area := ez.Find(tc.Input) + assert.Equal(t, tc.Expected1, index) + assert.Equal(t, tc.Expected2, section) + assert.Equal(t, tc.Expected3, area) 
+ }) + } +} + +func TestWidgetZone_Remove(t *testing.T) { + wid := NewWidgetID() + + tests := []struct { + Name string + Section WidgetSectionType + Input WidgetID + Expected WidgetIDList + Nil bool + }{ + { + Name: "left: remove a widget from widget section", + Section: WidgetSectionLeft, + Input: wid, + Expected: WidgetIDList{}, + }, + { + Name: "left: couldn't find widgetId", + Section: WidgetSectionLeft, + Input: NewWidgetID(), + Expected: WidgetIDList{wid}, + }, + { + Name: "center: remove a widget from widget section", + Section: WidgetSectionCenter, + Input: wid, + Expected: WidgetIDList{}, + }, + { + Name: "center: couldn't find widgetId", + Section: WidgetSectionCenter, + Input: NewWidgetID(), + Expected: WidgetIDList{wid}, + }, + { + Name: "right: remove a widget from widget section", + Section: WidgetSectionRight, + Input: wid, + Expected: WidgetIDList{}, + }, + { + Name: "right: couldn't find widgetId", + Section: WidgetSectionRight, + Input: NewWidgetID(), + Expected: WidgetIDList{wid}, + }, + { + Name: "nil", + Section: WidgetSectionLeft, + Input: wid, + Nil: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + if tc.Nil { + (*WidgetZone)(nil).Remove(tc.Input) + return + } + + ws := NewWidgetZone() + ws.Section(tc.Section).Area(WidgetAreaTop).Add(wid, -1) + ws.Remove(tc.Input) + assert.Equal(t, tc.Expected, ws.Section(tc.Section).Area(WidgetAreaTop).WidgetIDs()) + }) + } +} + +func TestWidgetZone_SetSection(t *testing.T) { + type args struct { + t WidgetSectionType + s *WidgetSection + } + + tests := []struct { + name string + args args + nil bool + }{ + { + name: "left", + args: args{ + t: WidgetSectionLeft, + s: &WidgetSection{}, + }, + }, + { + name: "center", + args: args{ + t: WidgetSectionCenter, + s: &WidgetSection{}, + }, + }, + { + name: "right", + args: args{ + t: WidgetSectionRight, + s: &WidgetSection{}, + }, + }, + { + name: "nil area", + args: args{ + t: WidgetSectionLeft, + s: 
nil, + }, + }, + { + name: "nil", + args: args{ + t: WidgetSectionLeft, + s: &WidgetSection{}, + }, + nil: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + var z *WidgetZone + if !tt.nil { + z = &WidgetZone{} + } + + z.SetSection(tt.args.t, tt.args.s) + + if !tt.nil { + var s2 *WidgetSection + switch tt.args.t { + case WidgetSectionLeft: + s2 = z.left + case WidgetSectionCenter: + s2 = z.center + case WidgetSectionRight: + s2 = z.right + } + assert.Same(t, tt.args.s, s2) + } + }) + } +} diff --git a/server/pkg/scene/widgets.go b/server/pkg/scene/widgets.go new file mode 100644 index 000000000..a671d7759 --- /dev/null +++ b/server/pkg/scene/widgets.go @@ -0,0 +1,135 @@ +package scene + +import ( + "errors" +) + +var ( + ErrDuplicatedWidgetInstance = errors.New("duplicated widget instance") +) + +type Widgets struct { + widgets []*Widget + align *WidgetAlignSystem +} + +func NewWidgets(w []*Widget, a *WidgetAlignSystem) *Widgets { + if a == nil { + a = NewWidgetAlignSystem() + } + if w == nil { + return &Widgets{widgets: []*Widget{}, align: a} + } + w2 := make([]*Widget, 0, len(w)) + for _, w1 := range w { + if w1 == nil { + continue + } + duplicated := false + for _, w3 := range w2 { + if w1.ID() == w3.ID() { + duplicated = true + break + } + } + if !duplicated { + w2 = append(w2, w1) + } + } + return &Widgets{widgets: w2, align: a} +} + +func (w *Widgets) Widgets() []*Widget { + if w == nil { + return nil + } + return append([]*Widget{}, w.widgets...) 
+} + +func (w *Widgets) Alignment() *WidgetAlignSystem { + if w == nil { + return nil + } + return w.align +} + +func (w *Widgets) Widget(wid WidgetID) *Widget { + if w == nil { + return nil + } + for _, ww := range w.widgets { + if ww.ID() == wid { + return ww + } + } + return nil +} + +func (w *Widgets) Has(wid WidgetID) bool { + if w == nil { + return false + } + for _, w2 := range w.widgets { + if w2.ID() == wid { + return true + } + } + return false +} + +func (w *Widgets) Add(sw *Widget) { + if w == nil || sw == nil || w.Has(sw.ID()) { + return + } + w.widgets = append(w.widgets, sw) +} + +func (w *Widgets) Remove(wid WidgetID) { + if w == nil { + return + } + for i := 0; i < len(w.widgets); i++ { + if w.widgets[i].ID() == wid { + w.widgets = append(w.widgets[:i], w.widgets[i+1:]...) + return + } + } +} + +func (w *Widgets) RemoveAllByPlugin(p PluginID, e *PluginExtensionID) (res []PropertyID) { + if w == nil { + return nil + } + for i := 0; i < len(w.widgets); i++ { + ww := w.widgets[i] + if ww.Plugin().Equal(p) && (e == nil || ww.Extension() == *e) { + res = append(res, ww.Property()) + w.align.Remove(ww.ID()) + w.widgets = append(w.widgets[:i], w.widgets[i+1:]...) 
+ i-- + } + } + return res +} + +func (w *Widgets) UpgradePlugin(oldp, newp PluginID) { + if w == nil || w.widgets == nil || oldp.Equal(newp) || oldp.IsNil() || newp.IsNil() { + return + } + for _, ww := range w.widgets { + if ww.plugin.Equal(oldp) { + ww.SetPlugin(newp) + } + } +} + +func (w *Widgets) Properties() []PropertyID { + if w == nil { + return nil + } + res := make([]PropertyID, 0, len(w.widgets)) + for _, ww := range w.widgets { + res = append(res, ww.property) + } + return res +} diff --git a/server/pkg/scene/widgets_test.go b/server/pkg/scene/widgets_test.go new file mode 100644 index 000000000..5cc70332c --- /dev/null +++ b/server/pkg/scene/widgets_test.go @@ -0,0 +1,396 @@ +package scene + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewWidgets(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID() + wid := NewWidgetID() + + tests := []struct { + Name string + Input []*Widget + Expected []*Widget + }{ + { + Name: "nil widget list", + Input: nil, + Expected: []*Widget{}, + }, + { + Name: "widget list with nil", + Input: []*Widget{nil}, + Expected: []*Widget{}, + }, + { + Name: "widget list", + Input: []*Widget{ + MustWidget(wid, pid, "see", pr, true, false), + }, + Expected: []*Widget{ + MustWidget(wid, pid, "see", pr, true, false), + }, + }, + { + Name: "widget list with duplicatd values", + Input: []*Widget{ + MustWidget(wid, pid, "see", pr, true, false), + MustWidget(wid, pid, "see", pr, true, false), + }, + Expected: []*Widget{ + MustWidget(wid, pid, "see", pr, true, false), + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, NewWidgets(tc.Input, nil).Widgets()) + }) + } +} + +func TestWidgets_Add(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID() + wid := NewWidgetID() + + tests := []struct { + Name string + Widgets []*Widget + Input *Widget + Expected []*Widget + Nil bool + }{ + { + 
Name: "add new widget", + Input: MustWidget(wid, pid, "see", pr, true, false), + Expected: []*Widget{MustWidget(wid, pid, "see", pr, true, false)}, + }, + { + Name: "add nil widget", + Input: nil, + Expected: []*Widget{}, + }, + { + Name: "add to nil widgets", + Input: MustWidget(wid, pid, "see", pr, true, false), + Expected: nil, + Nil: true, + }, + { + Name: "add existing widget", + Widgets: []*Widget{MustWidget(wid, pid, "see", pr, true, false)}, + Input: MustWidget(wid, pid, "see", pr, true, false), + Expected: []*Widget{MustWidget(wid, pid, "see", pr, true, false)}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + var ws *Widgets + if !tc.Nil { + ws = NewWidgets(tc.Widgets, nil) + } + ws.Add(tc.Input) + assert.Equal(t, tc.Expected, ws.Widgets()) + }) + } +} + +func TestWidgets_Remove(t *testing.T) { + wid := NewWidgetID() + wid2 := NewWidgetID() + pid := MustPluginID("xxx~1.1.1") + pid2 := MustPluginID("xxx~1.1.2") + pr := NewPropertyID() + + tests := []struct { + Name string + Input WidgetID + Nil bool + }{ + { + Name: "remove a widget", + Input: wid, + }, + { + Name: "remove from nil widgets", + Input: wid, + Nil: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + var ws *Widgets + if !tc.Nil { + ws = NewWidgets([]*Widget{ + MustWidget(wid, pid2, "e1", pr, true, false), + MustWidget(wid2, pid, "e1", pr, true, false), + }, nil) + assert.True(t, ws.Has(tc.Input)) + } + ws.Remove(tc.Input) + assert.False(t, ws.Has(tc.Input)) + }) + } +} + +func TestWidgets_RemoveAllByPlugin(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pid2 := MustPluginID("xxx~1.1.2") + w1 := MustWidget(NewWidgetID(), pid, "e1", NewPropertyID(), true, false) + w2 := MustWidget(NewWidgetID(), pid, "e2", NewPropertyID(), true, false) + w3 := MustWidget(NewWidgetID(), pid2, "e1", NewPropertyID(), true, false) + + tests := []struct { + Name string + ArgsPID PluginID + 
ArgsEID *PluginExtensionID + Target, Expected *Widgets + ExpectedResult []PropertyID + }{ + { + Name: "remove widgets", + ArgsPID: pid, + ArgsEID: nil, + Target: NewWidgets([]*Widget{w1, w2, w3}, nil), + Expected: NewWidgets([]*Widget{w3}, nil), + ExpectedResult: []PropertyID{w1.Property(), w2.Property()}, + }, + { + Name: "remove widgets of extension", + ArgsPID: pid, + ArgsEID: PluginExtensionID("e2").Ref(), + Target: NewWidgets([]*Widget{w1, w2, w3}, nil), + Expected: NewWidgets([]*Widget{w1, w3}, nil), + ExpectedResult: []PropertyID{w2.Property()}, + }, + { + Name: "remove from nil widgets", + Target: nil, + Expected: nil, + ExpectedResult: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.ExpectedResult, tc.Target.RemoveAllByPlugin(tc.ArgsPID, tc.ArgsEID)) + assert.Equal(t, tc.Expected, tc.Target) + }) + } +} + +func TestWidgets_UpgradePlugin(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pid2 := MustPluginID("zzz~1.1.1") + pr := NewPropertyID() + wid := NewWidgetID() + + tests := []struct { + Name string + PID, NewID PluginID + WS, Expected *Widgets + }{ + { + Name: "replace a widget", + PID: pid, + NewID: pid2, + WS: NewWidgets([]*Widget{MustWidget(wid, pid, "eee", pr, true, false)}, nil), + Expected: NewWidgets([]*Widget{MustWidget(wid, pid2, "eee", pr, true, false)}, nil), + }, + { + Name: "replace with nil widget", + PID: pid, + WS: NewWidgets(nil, nil), + Expected: NewWidgets(nil, nil), + }, + { + Name: "replace from nil widgets", + WS: nil, + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + tc.WS.UpgradePlugin(tc.PID, tc.NewID) + assert.Equal(t, tc.Expected, tc.WS) + }) + } +} + +func TestWidgets_Properties(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID() + pr2 := NewPropertyID() + wid := NewWidgetID() + wid2 := NewWidgetID() + + tests := []struct { + Name string + WS 
*Widgets + Expected []PropertyID + }{ + { + Name: "get properties", + WS: NewWidgets([]*Widget{ + MustWidget(wid, pid, "eee", pr, true, false), + MustWidget(wid2, pid, "eee", pr2, true, false), + }, nil), + Expected: []PropertyID{pr, pr2}, + }, + { + Name: "get properties from nil widgets", + WS: nil, + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.WS.Properties() + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestWidgets_Widgets(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID() + pr2 := NewPropertyID() + wid := NewWidgetID() + wid2 := NewWidgetID() + + tests := []struct { + Name string + WS *Widgets + Expected []*Widget + }{ + { + Name: "get widgets", + WS: NewWidgets([]*Widget{ + MustWidget(wid, pid, "eee", pr, true, false), + MustWidget(wid2, pid, "eee", pr2, true, false), + }, nil), + Expected: []*Widget{ + MustWidget(wid, pid, "eee", pr, true, false), + MustWidget(wid2, pid, "eee", pr2, true, false), + }, + }, + { + Name: "get widgets from nil widgets", + WS: nil, + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.WS.Widgets() + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestWidgets_Widget(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID() + wid := NewWidgetID() + + tests := []struct { + Name string + ID WidgetID + WS *Widgets + Expected *Widget + }{ + { + Name: "get a widget", + ID: wid, + WS: NewWidgets([]*Widget{MustWidget(wid, pid, "eee", pr, true, false)}, nil), + Expected: MustWidget(wid, pid, "eee", pr, true, false), + }, + { + Name: "dont has the widget", + ID: wid, + WS: NewWidgets([]*Widget{}, nil), + Expected: nil, + }, + { + Name: "get widget from nil widgets", + ID: wid, + WS: nil, + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := 
tc.WS.Widget(tc.ID) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestWidgets_Has(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID() + wid := NewWidgetID() + + tests := []struct { + Name string + ID WidgetID + WS *Widgets + Expected bool + }{ + { + Name: "has a widget", + ID: wid, + WS: NewWidgets([]*Widget{MustWidget(wid, pid, "eee", pr, true, false)}, nil), + Expected: true, + }, + { + Name: "dont has a widget", + ID: wid, + WS: NewWidgets([]*Widget{}, nil), + Expected: false, + }, + { + Name: "has from nil widgets", + ID: wid, + WS: nil, + Expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.WS.Has(tc.ID) + assert.Equal(t, tc.Expected, res) + }) + } +} diff --git a/server/pkg/shp/errreader.go b/server/pkg/shp/errreader.go new file mode 100644 index 000000000..4f2d63c3a --- /dev/null +++ b/server/pkg/shp/errreader.go @@ -0,0 +1,27 @@ +package shp + +import ( + "fmt" + "io" +) + +// errReader is a helper to perform multiple successive read from another reader +// and do the error checking only once afterwards. It will not perform any new +// reads in case there was an error encountered earlier. +type errReader struct { + io.Reader + e error + n int64 +} + +func (er *errReader) Read(p []byte) (n int, err error) { + if er.e != nil { + return 0, fmt.Errorf("unable to read after previous error: %v", er.e) + } + n, err = er.Reader.Read(p) + if n < len(p) && err != nil { + er.e = err + } + er.n += int64(n) + return n, er.e +} diff --git a/server/pkg/shp/reader.go b/server/pkg/shp/reader.go new file mode 100644 index 000000000..9687b6740 --- /dev/null +++ b/server/pkg/shp/reader.go @@ -0,0 +1,289 @@ +package shp + +import ( + "bytes" + "encoding/binary" + "fmt" + "io" + "math" +) + +// Reader provides a interface for reading Shapefiles. Calls +// to the Next method will iterate through the objects in the +// Shapefile. 
After a call to Next the object will be available +// through the Shape method. +type Reader struct { + GeometryType ShapeType + bbox Box + err error + + shp io.ReadSeeker + shape Shape + num int32 + // filename string + filelength int64 + + /* Note: not used + dbf io.ReadSeeker + dbfFields []Field + dbfNumRecords int32 + dbfHeaderLength int16 + dbfRecordLength int16*/ +} + +// ReadFrom read from io.Reader +func ReadFrom(r io.Reader) (*Reader, error) { + buf := new(bytes.Buffer) + _, err := buf.ReadFrom(r) + if err != nil { + return nil, err + } + reader := bytes.NewReader(buf.Bytes()) + sr := &Reader{shp: reader} + return sr, sr.readHeaders() +} + +// BBox returns the bounding box of the shapefile. +func (r *Reader) BBox() Box { + return r.bbox +} + +// Read and parse headers in the Shapefile. This will +// fill out GeometryType, filelength and bbox. +func (r *Reader) readHeaders() error { + er := &errReader{Reader: r.shp} + // don't trust the the filelength in the header + r.filelength, _ = r.shp.Seek(0, io.SeekEnd) + + var filelength int32 + _, err := r.shp.Seek(24, 0) + if err != nil { + return err + } + // file length + err = binary.Read(er, binary.BigEndian, &filelength) + if err != nil { + return err + } + _, err = r.shp.Seek(32, 0) + if err != nil { + return err + } + err = binary.Read(er, binary.LittleEndian, &r.GeometryType) + if err != nil { + return err + } + r.bbox.MinX = readFloat64(er) + r.bbox.MinY = readFloat64(er) + r.bbox.MaxX = readFloat64(er) + r.bbox.MaxY = readFloat64(er) + _, err = r.shp.Seek(100, 0) + if err != nil { + return err + } + return er.e +} + +func readFloat64(r io.Reader) float64 { + var bits uint64 + _ = binary.Read(r, binary.LittleEndian, &bits) + return math.Float64frombits(bits) +} + +// Close closes the Shapefile. + +// Shape returns the most recent feature that was read by +// a call to Next. 
It returns two values, the int is the
// object index starting from zero in the shapefile which
// can be used as row in ReadAttribute, and the Shape is the object.
func (r *Reader) Shape() (int, Shape) {
	// r.num is the 1-based record number of the shape most recently read
	// by Next, so the zero-based index is r.num - 1.
	return int(r.num) - 1, r.shape
}

/* Note: not used
// Attribute returns value of the n-th attribute of the most recent feature
// that was read by a call to Next.
func (r *Reader) Attribute(n int) string {
	return r.ReadAttribute(int(r.num)-1, n)
}*/

// newShape creates a new, empty shape of the given type, or an error when
// shapetype is not one of the known shapefile record types.
func newShape(shapetype ShapeType) (Shape, error) {
	switch shapetype {
	case NULL:
		return new(Null), nil
	case POINT:
		return new(Point), nil
	case POLYLINE:
		return new(PolyLine), nil
	case POLYGON:
		return new(Polygon), nil
	case MULTIPOINT:
		return new(MultiPoint), nil
	case POINTZ:
		return new(PointZ), nil
	case POLYLINEZ:
		return new(PolyLineZ), nil
	case POLYGONZ:
		return new(PolygonZ), nil
	case MULTIPOINTZ:
		return new(MultiPointZ), nil
	case POINTM:
		return new(PointM), nil
	case POLYLINEM:
		return new(PolyLineM), nil
	case POLYGONM:
		return new(PolygonM), nil
	case MULTIPOINTM:
		return new(MultiPointM), nil
	case MULTIPATCH:
		return new(MultiPatch), nil
	default:
		// Error strings are lowercase and unpunctuated by Go convention
		// (staticcheck ST1005): callers wrap them mid-sentence, as Next does.
		return nil, fmt.Errorf("unsupported shape type: %v", shapetype)
	}
}

// Next reads in the next Shape in the Shapefile, which
// will then be available through the Shape method. It
// returns false when the reader has reached the end of the
// file or encounters an error.
+func (r *Reader) Next() bool { + cur, _ := r.shp.Seek(0, io.SeekCurrent) + if cur >= r.filelength { + return false + } + + var size int32 + var shapetype ShapeType + er := &errReader{Reader: r.shp} + err1 := binary.Read(er, binary.BigEndian, &r.num) + if err1 != nil { + r.err = err1 + } + err1 = binary.Read(er, binary.BigEndian, &size) + if err1 != nil { + r.err = err1 + } + err1 = binary.Read(er, binary.LittleEndian, &shapetype) + if err1 != nil { + r.err = err1 + } + if er.e != nil { + if er.e != io.EOF { + r.err = fmt.Errorf("error when reading metadata of next shape: %v", er.e) + } else { + r.err = io.EOF + } + return false + } + + var err error + r.shape, err = newShape(shapetype) + if err != nil { + r.err = fmt.Errorf("error decoding shape type: %v", err) + return false + } + err = r.shape.read(er) + if err != nil { + r.err = fmt.Errorf("error while reading next shape: %v", err) + return false + } + if er.e != nil { + r.err = fmt.Errorf("error while reading next shape: %v", er.e) + return false + } + + // move to next object + _, err = r.shp.Seek(int64(size)*2+cur+8, 0) + if err != nil { + r.err = fmt.Errorf("error while seeking: %v", err) + return false + } + return true +} + +/* Note: not used +// Opens DBF file using r.filename + "dbf". This method +// will parse the header and fill out all dbf* values int +// the f object. 
+func (r *Reader) openDbf() (err error) { + if r.dbf != nil { + return + } + + r.dbf, err = os.Open(r.filename + ".dbf") + if err != nil { + return + } + + // read header + _, err = r.dbf.Seek(4, io.SeekStart) + if err != nil { + return err + } + err = binary.Read(r.dbf, binary.LittleEndian, &r.dbfNumRecords) + if err != nil { + return err + } + err = binary.Read(r.dbf, binary.LittleEndian, &r.dbfHeaderLength) + if err != nil { + return err + } + err = binary.Read(r.dbf, binary.LittleEndian, &r.dbfRecordLength) + if err != nil { + return err + } + _, err = r.dbf.Seek(20, io.SeekCurrent) // skip padding + if err != nil { + return err + } + numFields := int(math.Floor(float64(r.dbfHeaderLength-33) / 32.0)) + r.dbfFields = make([]Field, numFields) + err = binary.Read(r.dbf, binary.LittleEndian, &r.dbfFields) + if err != nil { + return err + } + return +} + +// Fields returns a slice of Fields that are present in the +// DBF table. +func (r *Reader) Fields() []Field { + err := r.openDbf() // make sure we have dbf file to read from + if err != nil { + return nil + } + return r.dbfFields +}*/ + +// Err returns the last non-EOF error encountered. +func (r *Reader) Err() error { + if r.err == io.EOF { + return nil + } + return r.err +} + +/* Note: not used +// ReadAttribute returns the attribute value at row for field in +// the DBF table as a string. Both values starts at 0. 
+func (r *Reader) ReadAttribute(row int, field int) string { + err := r.openDbf() // make sure we have a dbf file to read from + if err != nil { + return "" + } + seekTo := 1 + int64(r.dbfHeaderLength) + (int64(row) * int64(r.dbfRecordLength)) + for n := 0; n < field; n++ { + seekTo += int64(r.dbfFields[n].Size) + } + _, err = r.dbf.Seek(seekTo, io.SeekStart) + if err != nil { + return "" + } + buf := make([]byte, r.dbfFields[field].Size) + _, err = r.dbf.Read(buf) + if err != nil { + return "" + } + return strings.Trim(string(buf[:]), " ") +}*/ diff --git a/server/pkg/shp/reader_test.go b/server/pkg/shp/reader_test.go new file mode 100644 index 000000000..7906715eb --- /dev/null +++ b/server/pkg/shp/reader_test.go @@ -0,0 +1,221 @@ +package shp + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" +) + +func assertPointsEqual(t *testing.T, a, b []float64, msgAndArgs ...interface{}) bool { + if !assert.True(t, len(a) == len(b), msgAndArgs...) { + return false + } + + for k, v := range a { + if !assert.True(t, v == b[k], msgAndArgs...) 
{ + return false + } + } + return true +} + +func getShapesFromFile(prefix string, t *testing.T) (shapes []Shape) { + filename := prefix + ".shp" + ior, _ := os.Open(filename) + file, err := ReadFrom(ior) + assert.Nil(t, err, "Failed to open shapefile") + + defer func() { + err := ior.Close() + assert.Nil(t, err, "Failed to close shapefile") + }() + + for file.Next() { + _, shape := file.Shape() + shapes = append(shapes, shape) + } + assert.Nil(t, file.Err(), "Error while getting shapes") + + return shapes +} + +func testPoint(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*Point) + assert.True(t, ok, "Failed to type assert.") + assertPointsEqual(t, []float64{p.X, p.Y}, points[n], "Points did not match.") + } +} + +func testPolyLine(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*PolyLine) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y}, "Points did not match.") + } + } +} + +func testPolygon(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*Polygon) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y}, "Points did not match.") + } + } +} + +func testMultiPoint(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*MultiPoint) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y}, "Points did not match.") + } + } +} + +func testPointZ(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*PointZ) + assert.True(t, ok, "Failed to type assert.") + assertPointsEqual(t, []float64{p.X, p.Y, p.Z}, points[n], "Points did not match.") + } +} + +func testPolyLineZ(t 
*testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*PolyLineZ) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y, p.ZArray[k]}, "Points did not match.") + } + } +} + +func testPolygonZ(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*PolygonZ) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y, p.ZArray[k]}, "Points did not match.") + } + } +} + +func testMultiPointZ(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*MultiPointZ) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y, p.ZArray[k]}, "Points did not match.") + } + } +} + +func testPointM(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*PointM) + assert.True(t, ok, "Failed to type assert.") + assertPointsEqual(t, []float64{p.X, p.Y, p.M}, points[n], "Points did not match.") + } +} + +func testPolyLineM(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*PolyLineM) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y, p.MArray[k]}, "Points did not match.") + } + } +} + +func testPolygonM(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*PolygonM) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y, p.MArray[k]}, "Points did not match.") + } + } +} + +func testMultiPointM(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := 
s.(*MultiPointM) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y, p.MArray[k]}, "Points did not match.") + } + } +} + +func testMultiPatch(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*MultiPatch) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y, p.ZArray[k]}, "Points did not match.") + } + } +} + +func TestReadBBox(t *testing.T) { + tests := []struct { + filename string + want Box + }{ + {"test_files/multipatch.shp", Box{0, 0, 10, 10}}, + {"test_files/multipoint.shp", Box{0, 5, 10, 10}}, + {"test_files/multipointm.shp", Box{0, 5, 10, 10}}, + {"test_files/multipointz.shp", Box{0, 5, 10, 10}}, + {"test_files/point.shp", Box{0, 5, 10, 10}}, + {"test_files/pointm.shp", Box{0, 5, 10, 10}}, + {"test_files/pointz.shp", Box{0, 5, 10, 10}}, + {"test_files/polygon.shp", Box{0, 0, 5, 5}}, + {"test_files/polygonm.shp", Box{0, 0, 5, 5}}, + {"test_files/polygonz.shp", Box{0, 0, 5, 5}}, + {"test_files/polyline.shp", Box{0, 0, 25, 25}}, + {"test_files/polylinem.shp", Box{0, 0, 25, 25}}, + {"test_files/polylinez.shp", Box{0, 0, 25, 25}}, + } + + for _, tt := range tests { + f, _ := os.Open(tt.filename) + r, err := ReadFrom(f) + if err != nil { + t.Fatalf("%v", err) + } + if got := r.BBox().MinX; got != tt.want.MinX { + t.Errorf("got MinX = %v, want %v", got, tt.want.MinX) + } + if got := r.BBox().MinY; got != tt.want.MinY { + t.Errorf("got MinY = %v, want %v", got, tt.want.MinY) + } + if got := r.BBox().MaxX; got != tt.want.MaxX { + t.Errorf("got MaxX = %v, want %v", got, tt.want.MaxX) + } + if got := r.BBox().MaxY; got != tt.want.MaxY { + t.Errorf("got MaxY = %v, want %v", got, tt.want.MaxY) + } + } +} + +func TestReader(t *testing.T) { + tests := testData + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + 
t.Parallel() + shapes := getShapesFromFile(tc.name, t) + assert.Equal(t, tc.count, len(shapes), "Number of shapes for %s read was wrong. Wanted %d, got %d.", tc.name, tc.count, len(shapes)) + tc.tester(t, tc.points, shapes) + }) + } + +} diff --git a/server/pkg/shp/sequentialreader.go b/server/pkg/shp/sequentialreader.go new file mode 100644 index 000000000..ca6a7da11 --- /dev/null +++ b/server/pkg/shp/sequentialreader.go @@ -0,0 +1,300 @@ +package shp + +import ( + "encoding/binary" + "fmt" + "io" +) + +// SequentialReader is the interface that allows reading shapes and attributes one after another. It also embeds io.Closer. +type SequentialReader interface { + // Closer frees the resources allocated by the SequentialReader. + io.Closer + + // Next tries to advance the reading by one shape and one attribute row + // and returns true if the read operation could be performed without any + // error. + Next() bool + + // Shape returns the index and the last read shape. If the SequentialReader + // encountered any errors, nil is returned for the Shape. + Shape() (int, Shape) + + /* Note: not used + // Attribute returns the value of the n-th attribute in the current row. If + // the SequentialReader encountered any errors, the empty string is + // returned. + Attribute(n int) string + + // Fields returns the fields of the database. If the SequentialReader + // encountered any errors, nil is returned. + Fields() []Field*/ + + // Err returns the last non-EOF error encountered. + Err() error +} + +/* Note: not used +// Attributes returns all attributes of the shape that sr was last advanced to. +func Attributes(sr SequentialReader) []string { + if sr.Err() != nil { + return nil + } + s := make([]string, len(sr.Fields())) + for i := range s { + s[i] = sr.Attribute(i) + } + return s +} + +// AttributeCount returns the number of fields of the database. 
+func AttributeCount(sr SequentialReader) int { + return len(sr.Fields()) +}*/ + +// seqReader implements SequentialReader based on external io.ReadCloser +// instances +type seqReader struct { + shp/*, dbf*/ io.ReadCloser + err error + + geometryType ShapeType + bbox Box + + shape Shape + num int32 + filelength int64 + + /* Note: not used + dbfFields []Field + dbfNumRecords int32 + dbfHeaderLength int16 + dbfRecordLength int16 + dbfRow []byte*/ +} + +// Read and parse headers in the Shapefile. This will fill out GeometryType, +// filelength and bbox. +func (sr *seqReader) readHeaders() { + // contrary to Reader.readHeaders we cannot seek with the ReadCloser, so we + // need to trust the filelength in the header + + er := &errReader{Reader: sr.shp} + // shp headers + _, err := io.CopyN(io.Discard, er, 24) + if err != nil { + sr.err = fmt.Errorf("error when copy : %v", err) + return + } + var l int32 + err = binary.Read(er, binary.BigEndian, &l) + if err != nil { + sr.err = fmt.Errorf("error when reading : %v", err) + return + } + sr.filelength = int64(l) * 2 + _, err = io.CopyN(io.Discard, er, 4) + if err != nil { + sr.err = fmt.Errorf("error when copy : %v", err) + return + } + err = binary.Read(er, binary.LittleEndian, &sr.geometryType) + if err != nil { + sr.err = fmt.Errorf("error when reading : %v", err) + return + } + sr.bbox.MinX = readFloat64(er) + sr.bbox.MinY = readFloat64(er) + sr.bbox.MaxX = readFloat64(er) + sr.bbox.MaxY = readFloat64(er) + _, err = io.CopyN(io.Discard, er, 32) // skip four float64: Zmin, Zmax, Mmin, Max + if err != nil { + sr.err = fmt.Errorf("error when reading SHP header: %v", err) + return + } + if er.e != nil { + sr.err = fmt.Errorf("error when reading SHP header: %v", er.e) + return + } + + /* Note: not used + // dbf header + er = &errReader{Reader: sr.dbf} + if sr.dbf == nil { + return + } + _, err = io.CopyN(io.Discard, er, 4) + if err != nil { + sr.err = err + return + } + err = binary.Read(er, binary.LittleEndian, 
&sr.dbfNumRecords) + if err != nil { + sr.err = err + return + } + err = binary.Read(er, binary.LittleEndian, &sr.dbfHeaderLength) + if err != nil { + sr.err = err + return + } + err = binary.Read(er, binary.LittleEndian, &sr.dbfRecordLength) + if err != nil { + sr.err = err + return + } + _, err = io.CopyN(io.Discard, er, 20) // skip padding + if err != nil { + sr.err = err + return + } + numFields := int(math.Floor(float64(sr.dbfHeaderLength-33) / 32.0)) + sr.dbfFields = make([]Field, numFields) + err = binary.Read(er, binary.LittleEndian, &sr.dbfFields) + if err != nil { + sr.err = err + return + } + buf := make([]byte, 1) + _, err = er.Read(buf[:]) + if err != nil { + sr.err = fmt.Errorf("error when reading DBF header: %v", err) + return + } + if er.e != nil { + sr.err = fmt.Errorf("error when reading DBF header: %v", er.e) + return + } + if buf[0] != 0x0d { + sr.err = fmt.Errorf("Field descriptor array terminator not found") + return + } + sr.dbfRow = make([]byte, sr.dbfRecordLength)*/ +} + +// Next implements a method of interface SequentialReader for seqReader. 
+func (sr *seqReader) Next() bool { + if sr.err != nil { + return false + } + var num, size int32 + var shapetype ShapeType + + // read shape + er := &errReader{Reader: sr.shp} + err1 := binary.Read(er, binary.BigEndian, &num) + if err1 != nil { + return false + } + err1 = binary.Read(er, binary.BigEndian, &size) + if err1 != nil { + return false + } + err1 = binary.Read(er, binary.LittleEndian, &shapetype) + if err1 != nil { + return false + } + if er.e != nil { + if er.e != io.EOF { + sr.err = fmt.Errorf("error when reading shapefile header: %v", er.e) + } else { + sr.err = io.EOF + } + return false + } + sr.num = num + var err error + sr.shape, err = newShape(shapetype) + if err != nil { + sr.err = fmt.Errorf("error decoding shape type: %v", err) + return false + } + err = sr.shape.read(er) + if err != nil { + sr.err = fmt.Errorf("error reading shape : %v", err) + return false + } + switch { + case er.e == io.EOF: + // io.EOF means end-of-file was reached gracefully after all + // shape-internal reads succeeded, so it's not a reason stop + // iterating over all shapes. + er.e = nil + case er.e != nil: + sr.err = fmt.Errorf("error while reading next shape: %v", er.e) + return false + } + skipBytes := int64(size)*2 + 8 - er.n + _, ce := io.CopyN(io.Discard, er, skipBytes) + if er.e != nil { + sr.err = er.e + return false + } + if ce != nil { + sr.err = fmt.Errorf("error when discarding bytes on sequential read: %v", ce) + return false + } + /* Note: not used + if _, err := io.ReadFull(sr.dbf, sr.dbfRow); err != nil { + sr.err = fmt.Errorf("error when reading DBF row: %v", err) + return false + } + if sr.dbfRow[0] != 0x20 && sr.dbfRow[0] != 0x2a { + sr.err = fmt.Errorf("Attribute row %d starts with incorrect deletion indicator", num) + }*/ + return sr.err == nil +} + +// Shape implements a method of interface SequentialReader for seqReader. 
+func (sr *seqReader) Shape() (int, Shape) { + return int(sr.num) - 1, sr.shape +} + +/* Note: not used +// Attribute implements a method of interface SequentialReader for seqReader. +func (sr *seqReader) Attribute(n int) string { + if sr.err != nil { + return "" + } + start := 1 + f := 0 + for ; f < n; f++ { + start += int(sr.dbfFields[f].Size) + } + s := string(sr.dbfRow[start : start+int(sr.dbfFields[f].Size)]) + return strings.Trim(s, " ") +}*/ + +// Err returns the first non-EOF error that was encountered. +func (sr *seqReader) Err() error { + if sr.err == io.EOF { + return nil + } + return sr.err +} + +// Close closes the seqReader and free all the allocated resources. +func (sr *seqReader) Close() error { + if err := sr.shp.Close(); err != nil { + return err + } + /* Note: not used + if err := sr.dbf.Close(); err != nil { + return err + }*/ + return nil +} + +/* Note: not used +// Fields returns a slice of the fields that are present in the DBF table. +func (sr *seqReader) Fields() []Field { + return sr.dbfFields +}*/ + +// SequentialReaderFromExt returns a new SequentialReader that interprets shp +// as a source of shapes whose attributes can be retrieved from dbf. 
+func SequentialReaderFromExt(shp /*, dbf*/ io.ReadCloser) SequentialReader { + sr := &seqReader{shp: shp /*, dbf: dbf*/} + sr.readHeaders() + return sr +} diff --git a/server/pkg/shp/sequentialreader_test.go b/server/pkg/shp/sequentialreader_test.go new file mode 100644 index 000000000..2abf12b95 --- /dev/null +++ b/server/pkg/shp/sequentialreader_test.go @@ -0,0 +1,51 @@ +package shp + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" +) + +func openFile(name string, t *testing.T) *os.File { + f, err := os.Open(name) + if err != nil { + t.Fatalf("Failed to open %s: %v", name, err) + } + return f +} + +func getShapesSequentially(prefix string, t *testing.T) (shapes []Shape) { + shp := openFile(prefix+".shp", t) + // dbf := openFile(prefix+".dbf", t) + + sr := SequentialReaderFromExt(shp /*, dbf*/) + err := sr.Err() + assert.Nil(t, err, "Error when iterating over the shapefile header") + + for sr.Next() { + _, shape := sr.Shape() + shapes = append(shapes, shape) + } + err = sr.Err() + assert.Nil(t, err, "Error when iterating over the shapes") + + err = sr.Close() + assert.Nil(t, err, "Could not close sequential reader") + + return shapes +} + +func TestSequentialReader(t *testing.T) { + tests := testData + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + shapes := getShapesSequentially(tc.name, t) + assert.Equal(t, tc.count, len(shapes), "Number of shapes for %s read was wrong. Wanted %d, got %d.", tc.name, tc.count, len(shapes)) + tc.tester(t, tc.points, shapes) + }) + } +} diff --git a/server/pkg/shp/shapefile.go b/server/pkg/shp/shapefile.go new file mode 100644 index 000000000..a1c661c0b --- /dev/null +++ b/server/pkg/shp/shapefile.go @@ -0,0 +1,1066 @@ +package shp + +import ( + "encoding/binary" + "io" +) + +//go:generate stringer -type=ShapeType + +// ShapeType is a identifier for the the type of shapes. +type ShapeType int32 + +// These are the possible shape types. 
+const ( + NULL ShapeType = 0 + POINT ShapeType = 1 + POLYLINE ShapeType = 3 + POLYGON ShapeType = 5 + MULTIPOINT ShapeType = 8 + POINTZ ShapeType = 11 + POLYLINEZ ShapeType = 13 + POLYGONZ ShapeType = 15 + MULTIPOINTZ ShapeType = 18 + POINTM ShapeType = 21 + POLYLINEM ShapeType = 23 + POLYGONM ShapeType = 25 + MULTIPOINTM ShapeType = 28 + MULTIPATCH ShapeType = 31 +) + +// Box structure made up from four coordinates. This type +// is used to represent bounding boxes +type Box struct { + MinX, MinY, MaxX, MaxY float64 +} + +// Extend extends the box with coordinates from the provided +// box. This method calls Box.ExtendWithPoint twice with +// {MinX, MinY} and {MaxX, MaxY} +func (b *Box) Extend(box Box) { + b.ExtendWithPoint(Point{box.MinX, box.MinY}) + b.ExtendWithPoint(Point{box.MaxX, box.MaxY}) +} + +// ExtendWithPoint extends box with coordinates from point +// if they are outside the range of the current box. +func (b *Box) ExtendWithPoint(p Point) { + if p.X < b.MinX { + b.MinX = p.X + } + if p.Y < b.MinY { + b.MinY = p.Y + } + if p.X > b.MaxX { + b.MaxX = p.X + } + if p.Y > b.MaxY { + b.MaxY = p.Y + } +} + +// BBoxFromPoints returns the bounding box calculated +// from points. +func BBoxFromPoints(points []Point) (box Box) { + for k, p := range points { + if k == 0 { + box = Box{p.X, p.Y, p.X, p.Y} + } else { + box.ExtendWithPoint(p) + } + } + return +} + +// Shape interface +type Shape interface { + BBox() Box + + read(io.Reader) error + write(io.Writer) error +} + +// Null is an empty shape. +type Null struct { +} + +// BBox Returns an empty BBox at the geometry origin. 
+func (n Null) BBox() Box { + return Box{0.0, 0.0, 0.0, 0.0} +} + +func (n *Null) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, n) + if err != nil { + return err + } + return nil +} + +func (n *Null) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, n) + if err != nil { + return err + } + return nil +} + +// Point is the shape that consists of single a geometry point. +type Point struct { + X, Y float64 +} + +// BBox returns the bounding box of the Point feature, i.e. an empty area at +// the point location itself. +func (p Point) BBox() Box { + return Box{p.X, p.Y, p.X, p.Y} +} + +func (p *Point) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, p) + if err != nil { + return err + } + return nil +} + +func (p *Point) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p) + if err != nil { + return err + } + return nil +} + +func flatten(points [][]Point) []Point { + n, i := 0, 0 + for _, v := range points { + n += len(v) + } + r := make([]Point, n) + for _, v := range points { + for _, p := range v { + r[i] = p + i++ + } + } + return r +} + +// PolyLine is a shape type that consists of an ordered set of vertices that +// consists of one or more parts. A part is a connected sequence of two ore +// more points. Parts may or may not be connected to another and may or may not +// intersect each other. +type PolyLine struct { + Box + NumParts int32 + NumPoints int32 + Parts []int32 + Points []Point +} + +// NewPolyLine returns a pointer a new PolyLine created +// with the provided points. The inner slice should be +// the points that the parent part consists of. 
+func NewPolyLine(parts [][]Point) *PolyLine { + points := flatten(parts) + + p := &PolyLine{} + p.NumParts = int32(len(parts)) + p.NumPoints = int32(len(points)) + p.Parts = make([]int32, len(parts)) + var marker int32 + for i, part := range parts { + p.Parts[i] = marker + marker += int32(len(part)) + } + p.Points = points + p.Box = p.BBox() + + return p +} + +// BBox returns the bounding box of the PolyLine feature +func (p PolyLine) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *PolyLine) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumParts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Parts = make([]int32, p.NumParts) + p.Points = make([]Point, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Parts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + return nil +} + +func (p *PolyLine) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumParts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Parts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + return nil +} + +// Polygon is identical to the PolyLine struct. However the parts must form +// rings that may not intersect. 
+type Polygon PolyLine + +// BBox returns the bounding box of the Polygon feature +func (p Polygon) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *Polygon) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumParts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Parts = make([]int32, p.NumParts) + p.Points = make([]Point, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Parts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + return nil +} + +func (p *Polygon) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumParts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Parts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + return nil +} + +// MultiPoint is the shape that consists of multiple points. 
+type MultiPoint struct { + Box Box + NumPoints int32 + Points []Point +} + +// BBox returns the bounding box of the MultiPoint feature +func (p MultiPoint) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *MultiPoint) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Points = make([]Point, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + return nil +} + +func (p *MultiPoint) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + return nil +} + +// PointZ is a triplet of double precision coordinates plus a measure. +type PointZ struct { + X float64 + Y float64 + Z float64 + M float64 +} + +// BBox eturns the bounding box of the PointZ feature which is an zero-sized area +// at the X and Y coordinates of the feature. +func (p PointZ) BBox() Box { + return Box{p.X, p.Y, p.X, p.Y} +} + +func (p *PointZ) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, p) + if err != nil { + return err + } + return nil +} + +func (p *PointZ) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p) + if err != nil { + return err + } + return nil +} + +// PolyLineZ is a shape which consists of one or more parts. A part is a +// connected sequence of two or more points. Parts may or may not be connected +// and may or may not intersect one another. 
+type PolyLineZ struct { + Box Box + NumParts int32 + NumPoints int32 + Parts []int32 + Points []Point + ZRange [2]float64 + ZArray []float64 + MRange [2]float64 + MArray []float64 +} + +// BBox eturns the bounding box of the PolyLineZ feature. +func (p PolyLineZ) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *PolyLineZ) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumParts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Parts = make([]int32, p.NumParts) + p.Points = make([]Point, p.NumPoints) + p.ZArray = make([]float64, p.NumPoints) + p.MArray = make([]float64, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Parts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.ZRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.ZArray) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MArray) + if err != nil { + return err + } + return nil +} + +func (p *PolyLineZ) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumParts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Parts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.ZRange) + if err != nil { + return err + } + 
err = binary.Write(file, binary.LittleEndian, p.ZArray) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MArray) + if err != nil { + return err + } + return nil +} + +// PolygonZ structure is identical to the PolyLineZ structure. +type PolygonZ PolyLineZ + +// BBox returns the bounding box of the PolygonZ feature +func (p PolygonZ) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *PolygonZ) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumParts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Parts = make([]int32, p.NumParts) + p.Points = make([]Point, p.NumPoints) + p.ZArray = make([]float64, p.NumPoints) + p.MArray = make([]float64, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Parts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.ZRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.ZArray) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MArray) + if err != nil { + return err + } + return nil +} + +func (p *PolygonZ) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumParts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Parts) + if err != nil { + return 
err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.ZRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.ZArray) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MArray) + if err != nil { + return err + } + return nil +} + +// MultiPointZ consists of one ore more PointZ. +type MultiPointZ struct { + Box Box + NumPoints int32 + Points []Point + ZRange [2]float64 + ZArray []float64 + MRange [2]float64 + MArray []float64 +} + +// BBox eturns the bounding box of the MultiPointZ feature. +func (p MultiPointZ) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *MultiPointZ) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Points = make([]Point, p.NumPoints) + p.ZArray = make([]float64, p.NumPoints) + p.MArray = make([]float64, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.ZRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.ZArray) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MArray) + if err != nil { + return err + } + return nil +} + +func (p *MultiPointZ) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + 
return err + } + err = binary.Write(file, binary.LittleEndian, p.ZRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.ZArray) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MArray) + if err != nil { + return err + } + return nil +} + +// PointM is a point with a measure. +type PointM struct { + X float64 + Y float64 + M float64 +} + +// BBox returns the bounding box of the PointM feature which is a zero-sized +// area at the X- and Y-coordinates of the point. +func (p PointM) BBox() Box { + return Box{p.X, p.Y, p.X, p.Y} +} + +func (p *PointM) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, p) + if err != nil { + return err + } + return nil +} + +func (p *PointM) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p) + if err != nil { + return err + } + return nil +} + +// PolyLineM is the polyline in which each point also has a measure. +type PolyLineM struct { + Box Box + NumParts int32 + NumPoints int32 + Parts []int32 + Points []Point + MRange [2]float64 + MArray []float64 +} + +// BBox returns the bounding box of the PolyLineM feature. 
+func (p PolyLineM) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *PolyLineM) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumParts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Parts = make([]int32, p.NumParts) + p.Points = make([]Point, p.NumPoints) + p.MArray = make([]float64, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Parts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MArray) + if err != nil { + return err + } + return nil +} + +func (p *PolyLineM) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumParts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Parts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MArray) + if err != nil { + return err + } + return nil +} + +// PolygonM structure is identical to the PolyLineZ structure. +type PolygonM PolyLineZ + +// BBox returns the bounding box of the PolygonM feature. 
+func (p PolygonM) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *PolygonM) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumParts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Parts = make([]int32, p.NumParts) + p.Points = make([]Point, p.NumPoints) + p.MArray = make([]float64, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Parts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MArray) + if err != nil { + return err + } + return nil +} + +func (p *PolygonM) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumParts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Parts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MArray) + if err != nil { + return err + } + return nil +} + +// MultiPointM is the collection of multiple points with measures. 
+type MultiPointM struct { + Box Box + NumPoints int32 + Points []Point + MRange [2]float64 + MArray []float64 +} + +// BBox returns the bounding box of the MultiPointM feature +func (p MultiPointM) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *MultiPointM) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Points = make([]Point, p.NumPoints) + p.MArray = make([]float64, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MArray) + if err != nil { + return err + } + return nil +} + +func (p *MultiPointM) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MArray) + if err != nil { + return err + } + return nil +} + +// MultiPatch consists of a number of surface patches. Each surface patch +// describes a surface. The surface patches of a MultiPatch are referred to as +// its parts, and the type of part controls how the order of vertices of a +// MultiPatch part is interpreted.
+type MultiPatch struct { + Box Box + NumParts int32 + NumPoints int32 + Parts []int32 + PartTypes []int32 + Points []Point + ZRange [2]float64 + ZArray []float64 + MRange [2]float64 + MArray []float64 +} + +// BBox returns the bounding box of the MultiPatch feature +func (p MultiPatch) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *MultiPatch) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumParts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Parts = make([]int32, p.NumParts) + p.PartTypes = make([]int32, p.NumParts) + p.Points = make([]Point, p.NumPoints) + p.ZArray = make([]float64, p.NumPoints) + p.MArray = make([]float64, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Parts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.PartTypes) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.ZRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.ZArray) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MArray) + if err != nil { + return err + } + return nil +} + +func (p *MultiPatch) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumParts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Parts) + if err != nil { + return err + } + err = 
binary.Write(file, binary.LittleEndian, p.PartTypes) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.ZRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.ZArray) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MArray) + if err != nil { + return err + } + return nil +} + +// Field representation of a field object in the DBF file +type Field struct { + Name [11]byte + Fieldtype byte + Addr [4]byte // not used + Size uint8 + Precision uint8 + Padding [14]byte +} + +/* Note: not used +// Returns a string representation of the Field. Currently +// this only returns field name. +func (f Field) String() string { + return strings.TrimRight(string(f.Name[:]), "\x00") +} + +// StringField returns a Field that can be used in SetFields to initialize the +// DBF file. +func StringField(name string, length uint8) Field { + // TODO: Error checking + field := Field{Fieldtype: 'C', Size: length} + copy(field.Name[:], []byte(name)) + return field +} + +// NumberField returns a Field that can be used in SetFields to initialize the +// DBF file. +func NumberField(name string, length uint8) Field { + field := Field{Fieldtype: 'N', Size: length} + copy(field.Name[:], []byte(name)) + return field +} + +// FloatField returns a Field that can be used in SetFields to initialize the +// DBF file. Used to store floating points with precision in the DBF. +func FloatField(name string, length uint8, precision uint8) Field { + field := Field{Fieldtype: 'F', Size: length, Precision: precision} + copy(field.Name[:], []byte(name)) + return field +} + +// DateField returns a Field that can be used in SetFields to initialize the +// DBF file. Used to store Date strings formatted as YYYYMMDD.
Data wise this +// is the same as a StringField with length 8. +func DateField(name string) Field { + field := Field{Fieldtype: 'D', Size: 8} + copy(field.Name[:], []byte(name)) + return field +}*/ diff --git a/server/pkg/shp/shapefile_test.go b/server/pkg/shp/shapefile_test.go new file mode 100644 index 000000000..bfe25738f --- /dev/null +++ b/server/pkg/shp/shapefile_test.go @@ -0,0 +1,196 @@ +package shp + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestBox_ExtendWithPoint(t *testing.T) { + tests := []struct { + name string + input struct { + b Box + p Point + } + expected Box + }{ + { + name: "MaxY", + input: struct { + b Box + p Point + }{ + b: Box{0, 0, 1, 1}, + p: Point{0, 2}}, + expected: Box{ + MinX: 0, + MinY: 0, + MaxX: 1, + MaxY: 2, + }, + }, + { + name: "MaxX", + input: struct { + b Box + p Point + }{ + b: Box{0, 0, 1, 1}, + p: Point{2, 0}}, + expected: Box{ + MinX: 0, + MinY: 0, + MaxX: 2, + MaxY: 1, + }, + }, + { + name: "MinX", + input: struct { + b Box + p Point + }{ + b: Box{0, 0, 1, 1}, + p: Point{-1, 0}}, + expected: Box{ + MinX: -1, + MinY: 0, + MaxX: 1, + MaxY: 1, + }, + }, + { + name: "MinY", + input: struct { + b Box + p Point + }{ + b: Box{0, 0, 1, 1}, + p: Point{0, -1}}, + expected: Box{ + MinX: 0, + MinY: -1, + MaxX: 1, + MaxY: 1, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.b.ExtendWithPoint(tc.input.p) + assert.Equal(t, tc.expected, tc.input.b) + }) + } + +} + +func TestBox_Extend(t *testing.T) { + a := Box{-124.763068, 45.543541, -116.915989, 49.002494} + b := Box{-92.888114, 42.49192, -86.805415, 47.080621} + a.Extend(b) + c := Box{-124.763068, 42.49192, -86.805415, 49.002494} + if a.MinX != c.MinX { + t.Errorf("a.MinX = %v, want %v", a.MinX, c.MinX) + } + if a.MinY != c.MinY { + t.Errorf("a.MinY = %v, want %v", a.MinY, c.MinY) + } + if a.MaxX != c.MaxX { + t.Errorf("a.MaxX = %v, want %v", a.MaxX, c.MaxX) + } + if a.MaxY != c.MaxY 
{ + t.Errorf("a.MaxY = %v, want %v", a.MaxY, c.MaxY) + } +} + +func TestNewPolyLine(t *testing.T) { + points := [][]Point{ + {Point{0.0, 0.0}, Point{5.0, 5.0}}, + {Point{10.0, 10.0}, Point{15.0, 15.0}}, + } + polyLine := NewPolyLine(points) + + expected := &PolyLine{ + Box: Box{MinX: 0, MinY: 0, MaxX: 15, MaxY: 15}, + NumParts: 2, + NumPoints: 4, + Parts: []int32{0, 2}, + Points: []Point{ + {X: 0, Y: 0}, + {X: 5, Y: 5}, + {X: 10, Y: 10}, + {X: 15, Y: 15}, + }, + } + + assert.Equal(t, expected, polyLine) +} + +func TestBBoxFromPoints(t *testing.T) { + tests := []struct { + name string + input []Point + expected Box + }{ + { + name: "Single point", + input: []Point{{ + X: 1, + Y: 1, + }}, + expected: Box{ + MinX: 1, + MinY: 1, + MaxX: 1, + MaxY: 1, + }, + }, + { + name: "Tow points", + input: []Point{{ + X: 1, + Y: 1, + }, { + X: 0, + Y: 0, + }}, + expected: Box{ + MinX: 0, + MinY: 0, + MaxX: 1, + MaxY: 1, + }, + }, + { + name: "Multi points", + input: []Point{{ + X: 2, + Y: 2, + }, { + X: 0, + Y: 0, + }, { + X: 1, + Y: 3, + }}, + expected: Box{ + MinX: 0, + MinY: 0, + MaxX: 2, + MaxY: 3, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, BBoxFromPoints(tc.input)) + }) + } +} diff --git a/server/pkg/shp/shapetype_string.go b/server/pkg/shp/shapetype_string.go new file mode 100644 index 000000000..6b9bf309f --- /dev/null +++ b/server/pkg/shp/shapetype_string.go @@ -0,0 +1,51 @@ +// Code generated by "stringer -type=ShapeType"; DO NOT EDIT. + +package shp + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. 
+ var x [1]struct{} + _ = x[NULL-0] + _ = x[POINT-1] + _ = x[POLYLINE-3] + _ = x[POLYGON-5] + _ = x[MULTIPOINT-8] + _ = x[POINTZ-11] + _ = x[POLYLINEZ-13] + _ = x[POLYGONZ-15] + _ = x[MULTIPOINTZ-18] + _ = x[POINTM-21] + _ = x[POLYLINEM-23] + _ = x[POLYGONM-25] + _ = x[MULTIPOINTM-28] + _ = x[MULTIPATCH-31] +} + +const _ShapeType_name = "NULLPOINTPOLYLINEPOLYGONMULTIPOINTPOINTZPOLYLINEZPOLYGONZMULTIPOINTZPOINTMPOLYLINEMPOLYGONMMULTIPOINTMMULTIPATCH" + +var _ShapeType_map = map[ShapeType]string{ + 0: _ShapeType_name[0:4], + 1: _ShapeType_name[4:9], + 3: _ShapeType_name[9:17], + 5: _ShapeType_name[17:24], + 8: _ShapeType_name[24:34], + 11: _ShapeType_name[34:40], + 13: _ShapeType_name[40:49], + 15: _ShapeType_name[49:57], + 18: _ShapeType_name[57:68], + 21: _ShapeType_name[68:74], + 23: _ShapeType_name[74:83], + 25: _ShapeType_name[83:91], + 28: _ShapeType_name[91:102], + 31: _ShapeType_name[102:112], +} + +func (i ShapeType) String() string { + if str, ok := _ShapeType_map[i]; ok { + return str + } + return "ShapeType(" + strconv.FormatInt(int64(i), 10) + ")" +} diff --git a/server/pkg/shp/shapetype_string_test.go b/server/pkg/shp/shapetype_string_test.go new file mode 100644 index 000000000..35494974b --- /dev/null +++ b/server/pkg/shp/shapetype_string_test.go @@ -0,0 +1,99 @@ +package shp + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestShapeType_String(t *testing.T) { + tests := []struct { + name string + input ShapeType + expected string + }{ + { + name: "NULL", + input: 0, + expected: "NULL", + }, + { + name: "POINT", + input: 1, + expected: "POINT", + }, + { + name: "POLYLINE", + input: 3, + expected: "POLYLINE", + }, + { + name: "POLYGON", + input: 5, + expected: "POLYGON", + }, + { + name: "MULTIPOINT", + input: 8, + expected: "MULTIPOINT", + }, + { + name: "POINTZ", + input: 11, + expected: "POINTZ", + }, + { + name: "POLYLINEZ", + input: 13, + expected: "POLYLINEZ", + }, + { + name: "POLYGONZ", + input: 15, + expected: 
"POLYGONZ", + }, + { + name: "MULTIPOINTZ", + input: 18, + expected: "MULTIPOINTZ", + }, + { + name: "POINTM", + input: 21, + expected: "POINTM", + }, + { + name: "POLYLINEM", + input: 23, + expected: "POLYLINEM", + }, + { + name: "POLYGONM", + input: 25, + expected: "POLYGONM", + }, + { + name: "MULTIPOINTM", + input: 28, + expected: "MULTIPOINTM", + }, + { + name: "MULTIPATCH", + input: 31, + expected: "MULTIPATCH", + }, + { + name: "MULTIPATCH", + input: -1, + expected: "ShapeType(-1)", + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.String()) + }) + } +} diff --git a/server/pkg/shp/test_files/empty.zip b/server/pkg/shp/test_files/empty.zip new file mode 100644 index 000000000..acf7655b5 Binary files /dev/null and b/server/pkg/shp/test_files/empty.zip differ diff --git a/server/pkg/shp/test_files/multi.zip b/server/pkg/shp/test_files/multi.zip new file mode 100644 index 000000000..4d79e4318 Binary files /dev/null and b/server/pkg/shp/test_files/multi.zip differ diff --git a/server/pkg/shp/test_files/multipatch.shp b/server/pkg/shp/test_files/multipatch.shp new file mode 100644 index 000000000..0e6c58de2 Binary files /dev/null and b/server/pkg/shp/test_files/multipatch.shp differ diff --git a/server/pkg/shp/test_files/multipoint.shp b/server/pkg/shp/test_files/multipoint.shp new file mode 100644 index 000000000..ee30de9b4 Binary files /dev/null and b/server/pkg/shp/test_files/multipoint.shp differ diff --git a/server/pkg/shp/test_files/multipointm.shp b/server/pkg/shp/test_files/multipointm.shp new file mode 100644 index 000000000..232f560b2 Binary files /dev/null and b/server/pkg/shp/test_files/multipointm.shp differ diff --git a/server/pkg/shp/test_files/multipointz.shp b/server/pkg/shp/test_files/multipointz.shp new file mode 100644 index 000000000..e696e68c7 Binary files /dev/null and b/server/pkg/shp/test_files/multipointz.shp differ diff --git 
a/server/pkg/shp/test_files/ne_110m_admin_0_countries.zip b/server/pkg/shp/test_files/ne_110m_admin_0_countries.zip new file mode 100644 index 000000000..09d3194f0 Binary files /dev/null and b/server/pkg/shp/test_files/ne_110m_admin_0_countries.zip differ diff --git a/server/pkg/shp/test_files/point.shp b/server/pkg/shp/test_files/point.shp new file mode 100644 index 000000000..310419cbd Binary files /dev/null and b/server/pkg/shp/test_files/point.shp differ diff --git a/server/pkg/shp/test_files/pointm.shp b/server/pkg/shp/test_files/pointm.shp new file mode 100644 index 000000000..7f6216e56 Binary files /dev/null and b/server/pkg/shp/test_files/pointm.shp differ diff --git a/server/pkg/shp/test_files/pointz.shp b/server/pkg/shp/test_files/pointz.shp new file mode 100644 index 000000000..9e7ec168f Binary files /dev/null and b/server/pkg/shp/test_files/pointz.shp differ diff --git a/server/pkg/shp/test_files/polygon.shp b/server/pkg/shp/test_files/polygon.shp new file mode 100644 index 000000000..624a9db63 Binary files /dev/null and b/server/pkg/shp/test_files/polygon.shp differ diff --git a/server/pkg/shp/test_files/polygonm.shp b/server/pkg/shp/test_files/polygonm.shp new file mode 100644 index 000000000..f3a22d12c Binary files /dev/null and b/server/pkg/shp/test_files/polygonm.shp differ diff --git a/server/pkg/shp/test_files/polygonz.shp b/server/pkg/shp/test_files/polygonz.shp new file mode 100644 index 000000000..dcb5f30e1 Binary files /dev/null and b/server/pkg/shp/test_files/polygonz.shp differ diff --git a/server/pkg/shp/test_files/polyline.shp b/server/pkg/shp/test_files/polyline.shp new file mode 100644 index 000000000..938bc5acc Binary files /dev/null and b/server/pkg/shp/test_files/polyline.shp differ diff --git a/server/pkg/shp/test_files/polylinem.shp b/server/pkg/shp/test_files/polylinem.shp new file mode 100644 index 000000000..19a685ca6 Binary files /dev/null and b/server/pkg/shp/test_files/polylinem.shp differ diff --git 
a/server/pkg/shp/test_files/polylinez.shp b/server/pkg/shp/test_files/polylinez.shp new file mode 100644 index 000000000..972723286 Binary files /dev/null and b/server/pkg/shp/test_files/polylinez.shp differ diff --git a/server/pkg/shp/testdata_test.go b/server/pkg/shp/testdata_test.go new file mode 100644 index 000000000..135826a46 --- /dev/null +++ b/server/pkg/shp/testdata_test.go @@ -0,0 +1,199 @@ +package shp + +import "testing" + +type testFunc func(*testing.T, [][]float64, []Shape) + +var testData = []struct { + name string + points [][]float64 + tester testFunc + shpType ShapeType + count int +}{ + { + name: "test_files/point", + shpType: POINT, + points: [][]float64{ + {10, 10}, + {5, 5}, + {0, 10}, + }, + tester: testPoint, + count: 3, + }, + { + name: "test_files/polyline", + shpType: POLYLINE, + points: [][]float64{ + {0, 0}, + {5, 5}, + {10, 10}, + {15, 15}, + {20, 20}, + {25, 25}, + }, + tester: testPolyLine, + count: 2, + }, + { + name: "test_files/polygon", + shpType: POLYGON, + points: [][]float64{ + {0, 0}, + {0, 5}, + {5, 5}, + {5, 0}, + {0, 0}, + }, + tester: testPolygon, + count: 1, + }, + { + name: "test_files/multipoint", + shpType: MULTIPOINT, + points: [][]float64{ + {10, 10}, + {5, 5}, + {0, 10}, + }, + tester: testMultiPoint, + count: 1, + }, + { + name: "test_files/pointz", + shpType: POINTZ, + points: [][]float64{ + {10, 10, 100}, + {5, 5, 50}, + {0, 10, 75}, + }, + tester: testPointZ, + count: 3, + }, + { + name: "test_files/polylinez", + shpType: POLYLINEZ, + points: [][]float64{ + {0, 0, 0}, + {5, 5, 5}, + {10, 10, 10}, + {15, 15, 15}, + {20, 20, 20}, + {25, 25, 25}, + }, + tester: testPolyLineZ, + count: 2, + }, + { + name: "test_files/polygonz", + shpType: POLYGONZ, + points: [][]float64{ + {0, 0, 0}, + {0, 5, 5}, + {5, 5, 10}, + {5, 0, 15}, + {0, 0, 0}, + }, + tester: testPolygonZ, + count: 1, + }, + { + name: "test_files/multipointz", + shpType: MULTIPOINTZ, + points: [][]float64{ + {10, 10, 100}, + {5, 5, 50}, + {0, 10, 75}, + 
}, + tester: testMultiPointZ, + count: 1, + }, + { + name: "test_files/pointm", + shpType: POINTM, + points: [][]float64{ + {10, 10, 100}, + {5, 5, 50}, + {0, 10, 75}, + }, + tester: testPointM, + count: 3, + }, + { + name: "test_files/polylinem", + shpType: POLYLINEM, + points: [][]float64{ + {0, 0, 0}, + {5, 5, 5}, + {10, 10, 10}, + {15, 15, 15}, + {20, 20, 20}, + {25, 25, 25}, + }, + tester: testPolyLineM, + count: 2, + }, + { + name: "test_files/polygonm", + shpType: POLYGONM, + points: [][]float64{ + {0, 0, 0}, + {0, 5, 5}, + {5, 5, 10}, + {5, 0, 15}, + {0, 0, 0}, + }, + tester: testPolygonM, + count: 1, + }, + { + name: "test_files/multipointm", + shpType: MULTIPOINTM, + points: [][]float64{ + {10, 10, 100}, + {5, 5, 50}, + {0, 10, 75}, + }, + tester: testMultiPointM, + count: 1, + }, + { + name: "test_files/multipatch", + shpType: MULTIPATCH, + points: [][]float64{ + {0, 0, 0}, + {10, 0, 0}, + {10, 10, 0}, + {0, 10, 0}, + {0, 0, 0}, + {0, 10, 0}, + {0, 10, 10}, + {0, 0, 10}, + {0, 0, 0}, + {0, 10, 0}, + {10, 0, 0}, + {10, 0, 10}, + {10, 10, 10}, + {10, 10, 0}, + {10, 0, 0}, + {0, 0, 0}, + {0, 0, 10}, + {10, 0, 10}, + {10, 0, 0}, + {0, 0, 0}, + {10, 10, 0}, + {10, 10, 10}, + {0, 10, 10}, + {0, 10, 0}, + {10, 10, 0}, + {0, 0, 10}, + {0, 10, 10}, + {10, 10, 10}, + {10, 0, 10}, + {0, 0, 10}, + }, + tester: testMultiPatch, + count: 1, + }, +} diff --git a/server/pkg/shp/writer.go b/server/pkg/shp/writer.go new file mode 100644 index 000000000..4969c6b0e --- /dev/null +++ b/server/pkg/shp/writer.go @@ -0,0 +1,122 @@ +package shp + +import ( + "encoding/binary" + "io" + "math" +) + +// Writer is the type that is used to write a new shapefile. 
+type Writer struct { + shp io.WriteSeeker + GeometryType ShapeType + num int32 + bbox Box +} + +func CreateFrom(ws io.WriteSeeker, t ShapeType) (*Writer, error) { + _, err := ws.Seek(100, io.SeekStart) + if err != nil { + return nil, err + } + w := &Writer{ + shp: ws, + GeometryType: t, + } + return w, nil +} + +// Write shape to the writer. +// Returns the index of the written object +// which can be used in WriteAttribute. +func (w *Writer) Write(shape Shape) (int32, error) { + // grow the bounding box to cover the new shape + if w.num == 0 { + w.bbox = shape.BBox() + } else { + w.bbox.Extend(shape.BBox()) + } + + w.num++ + err := binary.Write(w.shp, binary.BigEndian, w.num) + if err != nil { + return 0, err + } + _, err = w.shp.Seek(4, io.SeekCurrent) + if err != nil { + return 0, err + } + start, err := w.shp.Seek(0, io.SeekCurrent) + if err != nil { + return 0, err + } + err = binary.Write(w.shp, binary.LittleEndian, w.GeometryType) + if err != nil { + return 0, err + } + err = shape.write(w.shp) + if err != nil { + return 0, err + } + finish, err := w.shp.Seek(0, io.SeekCurrent) + if err != nil { + return 0, err + } + length := int32(math.Floor((float64(finish) - float64(start)) / 2.0)) + _, err = w.shp.Seek(start-4, io.SeekStart) + if err != nil { + return 0, err + } + err = binary.Write(w.shp, binary.BigEndian, length) + if err != nil { + return 0, err + } + _, err = w.shp.Seek(finish, io.SeekStart) + if err != nil { + return 0, err + } + return w.num - 1, nil +} + +// Close closes the writer. +func (w *Writer) Close() error { + return w.writeHeader(w.shp) +} + +// writeHeader writes SHP to ws.
+func (w *Writer) writeHeader(ws io.WriteSeeker) error { + filelength, _ := ws.Seek(0, io.SeekEnd) + if filelength == 0 { + filelength = 100 + } + _, err := ws.Seek(0, io.SeekStart) + if err != nil { + return err + } + // file code + err = binary.Write(ws, binary.BigEndian, []int32{9994, 0, 0, 0, 0, 0}) + if err != nil { + return err + } + // file length + err = binary.Write(ws, binary.BigEndian, int32(filelength/2)) + if err != nil { + return err + } + // version and shape type + err = binary.Write(ws, binary.LittleEndian, []int32{1000, int32(w.GeometryType)}) + if err != nil { + return err + } + // bounding box + err = binary.Write(ws, binary.LittleEndian, w.bbox) + if err != nil { + return err + } + // elevation, measure + err = binary.Write(ws, binary.LittleEndian, []float64{0.0, 0.0, 0.0, 0.0}) + if err != nil { + return err + } + return nil +} diff --git a/server/pkg/shp/writer_test.go b/server/pkg/shp/writer_test.go new file mode 100644 index 000000000..26a73fc6d --- /dev/null +++ b/server/pkg/shp/writer_test.go @@ -0,0 +1,153 @@ +package shp + +import ( + "fmt" + "os" + "testing" + + "github.com/stretchr/testify/assert" +) + +var filenamePrefix = "test_files/write_" + +func removeShapefile(t *testing.T, filename string) { + _ = os.Remove(filename + ".shp") + // _ = os.Remove(filename + ".shx") + // _ = os.Remove(filename + ".dbf") +} + +func pointsToFloats(points []Point) [][]float64 { + floats := make([][]float64, len(points)) + for k, v := range points { + floats[k] = make([]float64, 2) + floats[k][0] = v.X + floats[k][1] = v.Y + } + return floats +} + +func TestWriter_Write_Point(t *testing.T) { + filename := filenamePrefix + "point" + defer removeShapefile(t, filename) + + points := [][]float64{ + {0.0, 0.0}, + {5.0, 5.0}, + {10.0, 10.0}, + } + + f, err := os.Create(filename + ".shp") + assert.Nil(t, err, "Error open file") + + shape, err := CreateFrom(f, POINT) + assert.Nil(t, err, "Error shp create") + + for _, p := range points { + _, err = 
shape.Write(&Point{p[0], p[1]}) + assert.Nil(t, err, "Error writing shape") + } + + err = shape.Close() + assert.Nil(t, err) + + err = f.Close() + assert.Nil(t, err) + + shapes := getShapesFromFile(filename, t) + + assert.Equal(t, len(points), len(shapes), "Number of shapes read was wrong") + testPoint(t, points, shapes) +} + +func TestWriter_Write_PolyLine(t *testing.T) { + filename := filenamePrefix + "polyline" + defer removeShapefile(t, filename) + + points := [][]Point{ + {Point{0.0, 0.0}, Point{5.0, 5.0}}, + {Point{10.0, 10.0}, Point{15.0, 15.0}}, + } + + f, _ := os.Create(filename + ".shp") + shape, err := CreateFrom(f, POLYLINE) + assert.Nil(t, err, "Error shp create") + + polyLine := NewPolyLine(points) + + _, err = shape.Write(polyLine) + assert.Nil(t, err) + + err = shape.Close() + assert.Nil(t, err) + + err = f.Close() + assert.Nil(t, err) + + shapes := getShapesFromFile(filename, t) + + assert.Equal(t, 1, len(shapes), "Number of shapes read was wrong") + testPolyLine(t, pointsToFloats(flatten(points)), shapes) +} + +func TestWriter_Close(t *testing.T) { + filename := filenamePrefix + "point" + defer removeShapefile(t, filename) + + points := [][]float64{ + {0.0, 0.0}, + {5.0, 5.0}, + {10.0, 10.0}, + } + + f, err := os.Create(filename + ".shp") + assert.Nil(t, err, "Error open file") + + shape, err := CreateFrom(f, POINT) + assert.Nil(t, err, "Error shp create") + + for _, p := range points { + _, err = shape.Write(&Point{p[0], p[1]}) + assert.Nil(t, err, "Error writing shape") + } + + err = f.Close() + assert.Nil(t, err) + + err = shape.Close() + assert.NotNil(t, err) +} + +func TestWriter(t *testing.T) { + tests := testData + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + shapes := getShapesFromFile(tc.name, t) + assert.Equal(t, tc.count, len(shapes), "Number of shapes for %s read was wrong. 
Wanted %d, got %d.", tc.name, tc.count, len(shapes)) + + for i, shp := range shapes { + outputPath := tc.name + "_out_" + fmt.Sprint(i) + f, _ := os.Create(outputPath + ".shp") + shape, _ := CreateFrom(f, tc.shpType) + + _, err := shape.Write(shp) + assert.Nil(t, err) + + err = shape.Close() + assert.Nil(t, err) + + err = f.Close() + assert.Nil(t, err) + + shpFromOut := getShapesFromFile(outputPath, t) + assert.Equal(t, shpFromOut[0], shp) + + removeShapefile(t, outputPath) + } + tc.tester(t, tc.points, shapes) + }) + } + +} diff --git a/server/pkg/shp/zipreader.go b/server/pkg/shp/zipreader.go new file mode 100644 index 000000000..7c7448230 --- /dev/null +++ b/server/pkg/shp/zipreader.go @@ -0,0 +1,107 @@ +package shp + +import ( + "archive/zip" + "bytes" + "fmt" + "io" + "strings" +) + +// ZipReader provides an interface for reading Shapefiles that are compressed in a ZIP archive. +type ZipReader struct { + sr SequentialReader + z *zip.Reader +} + +// openFromZIP is convenience function for opening the file called name that is +// compressed in z for reading. 
+func openFromZIP(z *zip.Reader, name string) (io.ReadCloser, error) { + for _, f := range z.File { + if f.Name == name { + return f.Open() + + } + } + return nil, fmt.Errorf("No such file in archive: %s", name) +} + +// ReadZipFrom read zip file from io.Reader, zip file must contain only one shape file +func ReadZipFrom(r io.Reader) (*ZipReader, error) { + zipBytes, err := io.ReadAll(r) + if err != nil { + return nil, err + } + reader, err := zip.NewReader(bytes.NewReader(zipBytes), int64(len(zipBytes))) + if err != nil { + return nil, err + } + zr := &ZipReader{ + z: reader, + } + shapeFiles := shapesInZip(reader) + if len(shapeFiles) == 0 { + return nil, fmt.Errorf("archive does not contain a .shp file") + } + if len(shapeFiles) > 1 { + return nil, fmt.Errorf("archive does contain multiple .shp files") + } + shp, err := openFromZIP(zr.z, shapeFiles[0].Name) + if err != nil { + return nil, err + } + /* Note: not used + withoutExt := strings.TrimSuffix(shapeFiles[0].Name, ".shp") + // dbf is optional, so no error checking here + dbf, _ := openFromZIP(zr.z, withoutExt+".dbf")*/ + zr.sr = SequentialReaderFromExt(shp /*, dbf*/) + return zr, nil +} + +func shapesInZip(z *zip.Reader) []*zip.File { + var shapeFiles []*zip.File + for _, f := range z.File { + if strings.HasSuffix(f.Name, ".shp") { + shapeFiles = append(shapeFiles, f) + } + } + return shapeFiles +} + +// Close closes the ZipReader and frees the allocated resources. +func (zr *ZipReader) Close() error { + err := zr.sr.Close() + if err != nil { + return err + } + return nil +} + +// Next reads the next shape in the shapefile and the next row in the DBF. Call +// Shape() and Attribute() to access the values. +func (zr *ZipReader) Next() bool { + return zr.sr.Next() +} + +// Shape returns the shape that was last read as well as the current index. +func (zr *ZipReader) Shape() (int, Shape) { + return zr.sr.Shape() +} + +/* Note: not used +// Attribute returns the n-th field of the last row that was read. 
If there +// were any errors before, the empty string is returned. +func (zr *ZipReader) Attribute(n int) string { + return zr.sr.Attribute(n) +} + +// Fields returns a slice of Fields that are present in the +// DBF table. +func (zr *ZipReader) Fields() []Field { + return zr.sr.Fields() +}*/ + +// Err returns the last non-EOF error that was encountered by this ZipReader. +func (zr *ZipReader) Err() error { + return zr.sr.Err() +} diff --git a/server/pkg/shp/zipreader_test.go b/server/pkg/shp/zipreader_test.go new file mode 100644 index 000000000..a72616564 --- /dev/null +++ b/server/pkg/shp/zipreader_test.go @@ -0,0 +1,84 @@ +package shp + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestReadZipFrom(t *testing.T) { + p := "test_files/ne_110m_admin_0_countries.zip" + + ior, err := os.Open(p) + assert.Nil(t, err) + defer func() { + err := ior.Close() + assert.Nil(t, err) + }() + + zr, err := ReadZipFrom(ior) + assert.Nil(t, err) + defer func() { + err := zr.Close() + assert.Nil(t, err) + }() + + var shps []Shape + for zr.Next() { + _, shp := zr.Shape() + shps = append(shps, shp) + } + assert.Nil(t, zr.Err()) + assert.Equal(t, 177, len(shps)) +} + +func TestReadZipFromWrongScenarios(t *testing.T) { + tests := []struct { + name string + input string + }{ + { + name: "ReadZipFromWrongFile", + input: "test_files/point.shp", + }, + { + name: "ReadZipFromEmptyZip", + input: "test_files/empty.zip", + }, + { + name: "ReadZipFromMultiZip", + input: "test_files/multi.zip", + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + ior, err := os.Open(tc.input) + assert.Nil(t, err) + defer func() { + err := ior.Close() + assert.Nil(t, err) + }() + + _, err = ReadZipFrom(ior) + assert.NotNil(t, err) + }) + } +} + +func TestReadZipFromClosedReader(t *testing.T) { + p := "test_files/point.shp" + + ior, err := os.Open(p) + assert.Nil(t, err) + + err = ior.Close() + assert.Nil(t, err) + + _, err = 
ReadZipFrom(ior) + assert.NotNil(t, err) +} diff --git a/server/pkg/tag/group.go b/server/pkg/tag/group.go new file mode 100644 index 000000000..138e650d4 --- /dev/null +++ b/server/pkg/tag/group.go @@ -0,0 +1,27 @@ +package tag + +type Group struct { + tag + tags IDList +} + +func (g *Group) Tags() IDList { + if g == nil { + return nil + } + return g.tags.Clone() +} + +func (g *Group) RemoveTag(ids ...ID) { + if g == nil { + return + } + g.tags = g.tags.Delete(ids...) +} + +func (g *Group) AddTag(ids ...ID) { + if g == nil { + return + } + g.tags = g.tags.Add(ids...) +} diff --git a/server/pkg/tag/group_builder.go b/server/pkg/tag/group_builder.go new file mode 100644 index 000000000..e84d28f3e --- /dev/null +++ b/server/pkg/tag/group_builder.go @@ -0,0 +1,63 @@ +package tag + +type GroupBuilder struct { + g *Group +} + +func NewGroup() *GroupBuilder { + return &GroupBuilder{g: &Group{}} +} + +func GroupFrom(t Tag) *Group { + li, ok := t.(*Group) + if !ok { + return nil + } + return li +} + +func (b *GroupBuilder) Build() (*Group, error) { + if b.g.id.IsNil() { + return nil, ErrInvalidID + } + if b.g.sceneId.IsNil() { + return nil, ErrInvalidSceneID + } + if b.g.label == "" { + return nil, ErrEmptyLabel + } + return b.g, nil +} + +func (b *GroupBuilder) MustBuild() *Group { + res, err := b.Build() + if err != nil { + panic(err) + } + return res +} + +func (b *GroupBuilder) ID(tid ID) *GroupBuilder { + b.g.id = tid + return b +} + +func (b *GroupBuilder) NewID() *GroupBuilder { + b.g.id = NewID() + return b +} + +func (b *GroupBuilder) Label(l string) *GroupBuilder { + b.g.label = l + return b +} + +func (b *GroupBuilder) Scene(sid SceneID) *GroupBuilder { + b.g.sceneId = sid + return b +} + +func (b *GroupBuilder) Tags(tl IDList) *GroupBuilder { + b.g.tags = tl.Clone() + return b +} diff --git a/server/pkg/tag/group_test.go b/server/pkg/tag/group_test.go new file mode 100644 index 000000000..d9b1281dc --- /dev/null +++ b/server/pkg/tag/group_test.go @@ -0,0 +1,162 
@@ +package tag + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + + "github.com/stretchr/testify/assert" +) + +var _ Tag = &Group{} + +func TestGroupBuilder_NewID(t *testing.T) { + b := NewGroup().NewID() + assert.NotEqual(t, ID{}, b.g.id) +} + +func TestGroupBuilder_Build(t *testing.T) { + tid := NewID() + sid := NewSceneID() + tags := IDList{ + NewID(), + NewID(), + } + + tests := []struct { + Name, Label string + Id ID + Scene SceneID + Tags IDList + Expected struct { + Group Group + Error error + } + }{ + { + Name: "fail: nil tag ID", + Label: "xxx", + Scene: NewSceneID(), + Expected: struct { + Group Group + Error error + }{ + Error: ErrInvalidID, + }, + }, + { + Name: "fail: empty label", + Id: NewID(), + Scene: NewSceneID(), + Expected: struct { + Group Group + Error error + }{ + Error: ErrEmptyLabel, + }, + }, + { + Name: "fail: nil scene ID", + Label: "xxx", + Id: NewID(), + Expected: struct { + Group Group + Error error + }{ + Error: ErrInvalidSceneID, + }, + }, + { + Name: "success", + Id: tid, + Label: "xxx", + Scene: sid, + Tags: tags, + Expected: struct { + Group Group + Error error + }{ + Group: Group{ + tag: tag{ + id: tid, + label: "xxx", + sceneId: sid, + }, + tags: tags, + }, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res, err := NewGroup(). + ID(tc.Id). + Scene(tc.Scene). + Label(tc.Label). + Tags(tc.Tags). 
+ Build() + if tc.Expected.Error == nil { + assert.Equal(t, tc.Expected.Group.ID(), res.ID()) + assert.Equal(t, tc.Expected.Group.Scene(), res.Scene()) + assert.Equal(t, tc.Expected.Group.Label(), res.Label()) + assert.Equal(t, tc.Expected.Group.Tags(), res.Tags()) + } else { + assert.Equal(t, tc.Expected.Error, err) + } + }) + } +} + +func TestGroup_AddTag(t *testing.T) { + sid := id.NewSceneID() + tid := id.NewTagID() + tests := []struct { + name string + tag *Group + input IDList + expected IDList + }{ + { + name: "should add a tag", + tag: NewGroup().NewID().Scene(sid).Label("foo").MustBuild(), + input: IDList{tid}, + expected: IDList{tid}, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + tc.tag.AddTag(tc.input...) + assert.Equal(tt, tc.tag.tags, tc.expected) + }) + } +} + +func TestGroup_RemoveTag(t *testing.T) { + sid := id.NewSceneID() + tid := id.NewTagID() + tid2 := id.NewTagID() + tests := []struct { + name string + tag *Group + input IDList + expected IDList + }{ + { + name: "should remove a tag", + tag: NewGroup().NewID().Scene(sid).Label("foo").Tags(IDList{tid, tid2}).MustBuild(), + input: IDList{tid2}, + expected: IDList{tid}, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + tc.tag.RemoveTag(tc.input...) 
+ assert.Equal(tt, tc.tag.tags, tc.expected) + }) + } +} diff --git a/server/pkg/tag/id.go b/server/pkg/tag/id.go new file mode 100644 index 000000000..d8f606cc1 --- /dev/null +++ b/server/pkg/tag/id.go @@ -0,0 +1,37 @@ +package tag + +import "github.com/reearth/reearth-backend/pkg/id" + +type ID = id.TagID +type SceneID = id.SceneID +type DatasetID = id.DatasetID +type DatasetSchemaID = id.DatasetSchemaID +type DatasetFieldID = id.DatasetFieldID + +type IDList = id.TagIDList + +var NewID = id.NewTagID +var NewSceneID = id.NewSceneID +var NewDatasetID = id.NewDatasetID +var NewDatasetSchemaID = id.NewDatasetSchemaID +var NewDatasetFieldID = id.NewDatasetFieldID + +var MustID = id.MustTagID +var MustSceneID = id.MustSceneID +var MustDatasetID = id.MustDatasetID +var MustDatasetSchemaID = id.MustDatasetSchemaID +var MustDatasetFieldID = id.MustDatasetFieldID + +var IDFrom = id.TagIDFrom +var SceneIDFrom = id.SceneIDFrom +var DatasetIDFrom = id.DatasetIDFrom +var DatasetSchemaIDFrom = id.DatasetSchemaIDFrom +var DatasetFieldIDFrom = id.DatasetFieldIDFrom + +var IDFromRef = id.TagIDFromRef +var SceneIDFromRef = id.SceneIDFromRef +var DatasetIDFromRef = id.DatasetIDFromRef +var DatasetSchemaIDFromRef = id.DatasetSchemaIDFromRef +var DatasetFieldIDFromRef = id.DatasetFieldIDFromRef + +var ErrInvalidID = id.ErrInvalidID diff --git a/server/pkg/tag/item.go b/server/pkg/tag/item.go new file mode 100644 index 000000000..696dcaf0a --- /dev/null +++ b/server/pkg/tag/item.go @@ -0,0 +1,44 @@ +package tag + +type Item struct { + tag + parent *ID + linkedDatasetFieldID *DatasetFieldID + linkedDatasetID *DatasetID + linkedDatasetSchemaID *DatasetSchemaID +} + +func (i *Item) Parent() *ID { + if i == nil { + return nil + } + return i.parent.CopyRef() +} + +func (i *Item) LinkedDatasetFieldID() *DatasetFieldID { + if i == nil { + return nil + } + return i.linkedDatasetFieldID.CopyRef() +} + +func (i *Item) LinkedDatasetID() *DatasetID { + if i == nil { + return nil + } + return 
i.linkedDatasetID.CopyRef() +} + +func (i *Item) LinkedDatasetSchemaID() *DatasetSchemaID { + if i == nil { + return nil + } + return i.linkedDatasetSchemaID.CopyRef() +} + +func (i *Item) SetParent(p *ID) { + if i == nil { + return + } + i.parent = p.CopyRef() +} diff --git a/server/pkg/tag/item_builder.go b/server/pkg/tag/item_builder.go new file mode 100644 index 000000000..29eb8790a --- /dev/null +++ b/server/pkg/tag/item_builder.go @@ -0,0 +1,78 @@ +package tag + +type ItemBuilder struct { + i *Item +} + +func NewItem() *ItemBuilder { + return &ItemBuilder{i: &Item{}} +} + +func ItemFrom(t Tag) *Item { + li, ok := t.(*Item) + if !ok { + return nil + } + return li +} + +func (b *ItemBuilder) Build() (*Item, error) { + if b.i.id.IsNil() { + return nil, ErrInvalidID + } + if b.i.sceneId.IsNil() { + return nil, ErrInvalidSceneID + } + if b.i.label == "" { + return nil, ErrEmptyLabel + } + return b.i, nil +} + +func (b *ItemBuilder) MustBuild() *Item { + res, err := b.Build() + if err != nil { + panic(err) + } + return res +} + +func (b *ItemBuilder) ID(tid ID) *ItemBuilder { + b.i.id = tid + return b +} + +func (b *ItemBuilder) NewID() *ItemBuilder { + b.i.id = NewID() + return b +} + +func (b *ItemBuilder) Label(l string) *ItemBuilder { + b.i.label = l + return b +} + +func (b *ItemBuilder) Scene(sid SceneID) *ItemBuilder { + b.i.sceneId = sid + return b +} + +func (b *ItemBuilder) Parent(p *ID) *ItemBuilder { + b.i.parent = p.CopyRef() + return b +} + +func (b *ItemBuilder) LinkedDatasetFieldID(dfid *DatasetFieldID) *ItemBuilder { + b.i.linkedDatasetFieldID = dfid + return b +} + +func (b *ItemBuilder) LinkedDatasetID(did *DatasetID) *ItemBuilder { + b.i.linkedDatasetID = did + return b +} + +func (b *ItemBuilder) LinkedDatasetSchemaID(dsid *DatasetSchemaID) *ItemBuilder { + b.i.linkedDatasetSchemaID = dsid + return b +} diff --git a/server/pkg/tag/item_test.go b/server/pkg/tag/item_test.go new file mode 100644 index 000000000..ecee16594 --- /dev/null +++ 
b/server/pkg/tag/item_test.go @@ -0,0 +1,118 @@ +package tag + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +var _ Tag = &Item{} + +func TestItemBuilder_NewID(t *testing.T) { + b := NewItem().NewID() + assert.NotEqual(t, ID{}, b.i.id) +} + +func TestItemBuilder_Build(t *testing.T) { + tid := NewID() + sid := NewSceneID() + dfid := NewDatasetFieldID() + did := NewDatasetID() + dsid := NewDatasetSchemaID() + + tests := []struct { + Name, Label string + Id ID + Scene SceneID + LinkedDatasetFieldID *DatasetFieldID + LinkedDatasetID *DatasetID + LinkedDatasetSchemaID *DatasetSchemaID + Expected struct { + Item Item + Error error + } + }{ + { + Name: "fail: nil tag ID", + Label: "xxx", + Scene: NewSceneID(), + Expected: struct { + Item Item + Error error + }{ + Error: ErrInvalidID, + }, + }, + { + Name: "fail: empty label", + Id: NewID(), + Scene: NewSceneID(), + Expected: struct { + Item Item + Error error + }{ + Error: ErrEmptyLabel, + }, + }, + { + Name: "fail: nil scene ID", + Label: "xxx", + Id: NewID(), + Expected: struct { + Item Item + Error error + }{ + Error: ErrInvalidSceneID, + }, + }, + { + Name: "success", + Label: "xxx", + Id: tid, + Scene: sid, + LinkedDatasetFieldID: dfid.Ref(), + LinkedDatasetID: did.Ref(), + LinkedDatasetSchemaID: dsid.Ref(), + Expected: struct { + Item Item + Error error + }{ + Item: Item{ + tag: tag{ + id: tid, + label: "xxx", + sceneId: sid, + }, + linkedDatasetFieldID: dfid.Ref(), + linkedDatasetID: did.Ref(), + linkedDatasetSchemaID: dsid.Ref(), + }, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res, err := NewItem(). + ID(tc.Id). + Scene(tc.Scene). + Label(tc.Label). + LinkedDatasetSchemaID(tc.LinkedDatasetSchemaID). + LinkedDatasetID(tc.LinkedDatasetID). + LinkedDatasetFieldID(tc.LinkedDatasetFieldID). 
+ Build() + if tc.Expected.Error == nil { + assert.Equal(t, tc.Expected.Item.ID(), res.ID()) + assert.Equal(t, tc.Expected.Item.Scene(), res.Scene()) + assert.Equal(t, tc.Expected.Item.Label(), res.Label()) + assert.Equal(t, tc.Expected.Item.LinkedDatasetFieldID(), res.LinkedDatasetFieldID()) + assert.Equal(t, tc.Expected.Item.LinkedDatasetSchemaID(), res.LinkedDatasetSchemaID()) + assert.Equal(t, tc.Expected.Item.LinkedDatasetID(), res.LinkedDatasetID()) + } else { + assert.Equal(t, tc.Expected.Error, err) + } + }) + } +} diff --git a/server/pkg/tag/list.go b/server/pkg/tag/list.go new file mode 100644 index 000000000..a4b0535aa --- /dev/null +++ b/server/pkg/tag/list.go @@ -0,0 +1,97 @@ +package tag + +type List []Tag + +func DerefList(tags []*Tag) List { + res := make(List, 0, len(tags)) + for _, t := range tags { + if t == nil { + continue + } + res = append(res, *t) + } + return res +} + +func (l List) Items() (res []*Item) { + if len(l) == 0 { + return + } + + res = make([]*Item, 0, len(l)) + for _, t := range l { + if g := ItemFrom(t); g != nil { + res = append(res, g) + } + } + + return res +} + +func (l List) Groups() (res []*Group) { + if len(l) == 0 { + return + } + + res = make([]*Group, 0, len(l)) + for _, t := range l { + if g := GroupFrom(t); g != nil { + res = append(res, g) + } + } + + return res +} + +func (l List) FilterByScene(s SceneID) (res List) { + if len(l) == 0 { + return + } + + res = make(List, 0, len(l)) + for _, t := range l { + if t.Scene() == s { + res = append(res, t) + } + } + + return res +} + +func (l List) Roots() (res List) { + if len(l) == 0 { + return + } + + groups := l.Groups() + for _, t := range l { + found := false + for _, u := range groups { + if t.ID() == u.ID() { + continue + } + if u.Tags().Has(t.ID()) { + found = true + } + } + if !found { + res = append(res, t) + } + } + + return res +} + +func (l List) Refs() (res []*Tag) { + if len(l) == 0 { + return + } + + res = make([]*Tag, 0, len(l)) + for _, t := range l { 
+ t := t + res = append(res, &t) + } + + return res +} diff --git a/server/pkg/tag/list_test.go b/server/pkg/tag/list_test.go new file mode 100644 index 000000000..19ccadb1e --- /dev/null +++ b/server/pkg/tag/list_test.go @@ -0,0 +1,82 @@ +package tag + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestList_Items(t *testing.T) { + sceneID := NewSceneID() + sceneID2 := NewSceneID() + tag1 := NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() + tag2 := NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() + tag3 := NewItem().NewID().Label("foo").Scene(sceneID2).MustBuild() + tag4 := NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDList{ + tag1.ID(), tag2.ID(), + }).MustBuild() + tags := List{tag1, tag2, tag3, tag4} + + assert.Equal(t, []*Item{tag1, tag2, tag3}, tags.Items()) + assert.Nil(t, List(nil).Items()) +} + +func TestList_Groups(t *testing.T) { + sceneID := NewSceneID() + sceneID2 := NewSceneID() + tag1 := NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() + tag2 := NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() + tag3 := NewItem().NewID().Label("foo").Scene(sceneID2).MustBuild() + tag4 := NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDList{ + tag1.ID(), tag2.ID(), + }).MustBuild() + tags := List{tag1, tag2, tag3, tag4} + + assert.Equal(t, []*Group{tag4}, tags.Groups()) + assert.Nil(t, List(nil).Groups()) +} + +func TestList_FilterByScene(t *testing.T) { + sceneID := NewSceneID() + sceneID2 := NewSceneID() + tag1 := NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() + tag2 := NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() + tag3 := NewItem().NewID().Label("foo").Scene(sceneID2).MustBuild() + tag4 := NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDList{ + tag1.ID(), tag2.ID(), + }).MustBuild() + tags := List{tag1, tag2, tag3, tag4} + + assert.Equal(t, List{tag1, tag2, tag4}, tags.FilterByScene(sceneID)) + assert.Nil(t, List(nil).FilterByScene(sceneID)) +} + +func 
TestList_Roots(t *testing.T) { + sceneID := NewSceneID() + sceneID2 := NewSceneID() + tag1 := NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() + tag2 := NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() + tag3 := NewItem().NewID().Label("foo").Scene(sceneID2).MustBuild() + tag4 := NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDList{ + tag1.ID(), tag2.ID(), + }).MustBuild() + tags := List{tag1, tag2, tag3, tag4} + + assert.Equal(t, List{tag3, tag4}, tags.Roots()) + assert.Nil(t, List(nil).Roots()) +} + +func TestList_Refs(t *testing.T) { + sceneID := NewSceneID() + sceneID2 := NewSceneID() + var tag1 Tag = NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() + var tag2 Tag = NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() + var tag3 Tag = NewItem().NewID().Label("foo").Scene(sceneID2).MustBuild() + var tag4 Tag = NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDList{ + tag1.ID(), tag2.ID(), + }).MustBuild() + tags := List{tag1, tag2, tag3, tag4} + + assert.Equal(t, []*Tag{&tag1, &tag2, &tag3, &tag4}, tags.Refs()) + assert.Nil(t, List(nil).Refs()) +} diff --git a/server/pkg/tag/loader.go b/server/pkg/tag/loader.go new file mode 100644 index 000000000..b90b3511e --- /dev/null +++ b/server/pkg/tag/loader.go @@ -0,0 +1,46 @@ +package tag + +import "context" + +type Loader func(context.Context, ...ID) ([]*Tag, error) +type SceneLoader func(context.Context, SceneID) ([]*Tag, error) + +func LoaderFrom(data List) Loader { + return func(ctx context.Context, ids ...ID) ([]*Tag, error) { + res := make([]*Tag, 0, len(ids)) + for _, i := range ids { + found := false + for _, d := range data { + if i == d.ID() { + res = append(res, &d) + found = true + break + } + } + if !found { + res = append(res, nil) + } + } + return res, nil + } +} + +func LoaderFromMap(data map[ID]Tag) Loader { + return func(ctx context.Context, ids ...ID) ([]*Tag, error) { + res := make([]*Tag, 0, len(ids)) + for _, i := range ids { + if d, ok := data[i]; ok { 
+ res = append(res, &d) + } else { + res = append(res, nil) + } + } + return res, nil + } +} + +func SceneLoaderFrom(data List) SceneLoader { + return func(ctx context.Context, id SceneID) ([]*Tag, error) { + return data.FilterByScene(id).Refs(), nil + } +} diff --git a/server/pkg/tag/map.go b/server/pkg/tag/map.go new file mode 100644 index 000000000..26371e388 --- /dev/null +++ b/server/pkg/tag/map.go @@ -0,0 +1,48 @@ +package tag + +import "sort" + +type Map map[ID]Tag + +func (m Map) All() List { + if m == nil || len(m) == 0 { + return nil + } + res := make(List, 0, len(m)) + for _, t := range m { + res = append(res, t) + } + sort.SliceStable(res, func(i, j int) bool { + return res[i].ID().Compare(res[j].ID()) < 0 + }) + return res +} + +func MapFromList(tags []Tag) Map { + res := make(Map) + for _, t := range tags { + if t == nil { + continue + } + + res[t.ID()] = t + } + return res +} + +func MapFromRefList(tags []*Tag) Map { + res := make(Map) + for _, t := range tags { + if t == nil { + continue + } + + t2 := *t + if t2 == nil { + continue + } + + res[t2.ID()] = t2 + } + return res +} diff --git a/server/pkg/tag/tag.go b/server/pkg/tag/tag.go new file mode 100644 index 000000000..166c2ea74 --- /dev/null +++ b/server/pkg/tag/tag.go @@ -0,0 +1,53 @@ +package tag + +import ( + "errors" +) + +var ( + ErrEmptyLabel = errors.New("tag label can't be empty") + ErrInvalidSceneID = errors.New("invalid scene ID") +) + +type tag struct { + id ID + label string + sceneId SceneID +} + +type Tag interface { + ID() ID + Scene() SceneID + Label() string + Rename(string) +} + +func (t *tag) ID() ID { + return t.id +} + +func (t *tag) Scene() SceneID { + return t.sceneId +} + +func (t *tag) Label() string { + return t.label +} + +func (t *tag) Rename(s string) { + t.label = s +} + +func ToTagGroup(t Tag) *Group { + if tg, ok := t.(*Group); ok { + return tg + } + return nil +} + +func ToTagItem(t Tag) *Item { + if ti, ok := t.(*Item); ok { + return ti + } + return nil +} diff 
--git a/server/pkg/tag/tag_test.go b/server/pkg/tag/tag_test.go new file mode 100644 index 000000000..d99abc142 --- /dev/null +++ b/server/pkg/tag/tag_test.go @@ -0,0 +1,33 @@ +package tag + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestToTagGroup(t *testing.T) { + tag := Item{} + group := ToTagGroup(&tag) + assert.Nil(t, group) + tag2 := Group{} + group2 := ToTagGroup(&tag2) + assert.NotNil(t, group2) +} + +func TestToTagItem(t *testing.T) { + tag := Group{} + item := ToTagItem(&tag) + assert.Nil(t, item) + tag2 := Item{} + item2 := ToTagItem(&tag2) + assert.NotNil(t, item2) +} + +func TestTag_Rename(t *testing.T) { + tt := tag{ + label: "xxx", + } + tt.Rename("changed") + assert.Equal(t, "changed", tt.Label()) +} diff --git a/server/pkg/user/auth.go b/server/pkg/user/auth.go new file mode 100644 index 000000000..a20ed8990 --- /dev/null +++ b/server/pkg/user/auth.go @@ -0,0 +1,34 @@ +package user + +import ( + "strings" +) + +type Auth struct { + Provider string + Sub string +} + +func AuthFromAuth0Sub(sub string) Auth { + s := strings.SplitN(sub, "|", 2) + if len(s) != 2 { + return Auth{Provider: "", Sub: sub} + } + return Auth{Provider: s[0], Sub: sub} +} + +func (a Auth) IsAuth0() bool { + return a.Provider == "auth0" +} + +func (a Auth) Ref() *Auth { + a2 := a + return &a2 +} + +func GenReearthSub(userID string) *Auth { + return &Auth{ + Provider: "reearth", + Sub: "reearth|" + userID, + } +} diff --git a/server/pkg/user/auth_test.go b/server/pkg/user/auth_test.go new file mode 100644 index 000000000..83682003c --- /dev/null +++ b/server/pkg/user/auth_test.go @@ -0,0 +1,105 @@ +package user + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + + "github.com/stretchr/testify/assert" +) + +func TestAuthFromAuth0Sub(t *testing.T) { + tests := []struct { + Name, Sub string + Expected Auth + }{ + { + Name: "with provider", + Sub: "xx|yy", + Expected: Auth{ + Provider: "xx", + Sub: "xx|yy", + }, + }, + { + Name: 
"without provider", + Sub: "yy", + Expected: Auth{ + Provider: "", + Sub: "yy", + }, + }, + { + Name: "empty", + Sub: "", + Expected: Auth{}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, AuthFromAuth0Sub(tc.Sub)) + }) + } +} + +func TestAuth_IsAuth0(t *testing.T) { + tests := []struct { + Name string + Auth Auth + Expected bool + }{ + { + Name: "is Auth", + Auth: Auth{ + Provider: "auth0", + Sub: "xxx", + }, + Expected: true, + }, + { + Name: "is not Auth", + Auth: Auth{ + Provider: "foo", + Sub: "hoge", + }, + Expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.Auth.IsAuth0()) + }) + } +} + +func TestGenReearthSub(t *testing.T) { + uid := id.NewUserID() + + tests := []struct { + name string + input string + want *Auth + }{ + { + name: "should return reearth sub", + input: uid.String(), + want: &Auth{ + Provider: "reearth", + Sub: "reearth|" + uid.String(), + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := GenReearthSub(tt.input) + assert.Equal(t, tt.want, got) + }) + } +} diff --git a/server/pkg/user/builder.go b/server/pkg/user/builder.go new file mode 100644 index 000000000..c3fa6129a --- /dev/null +++ b/server/pkg/user/builder.go @@ -0,0 +1,110 @@ +package user + +import ( + "golang.org/x/text/language" +) + +type Builder struct { + u *User + passwordText string + email string +} + +func New() *Builder { + return &Builder{u: &User{}} +} + +func (b *Builder) Build() (*User, error) { + if b.u.id.IsNil() { + return nil, ErrInvalidID + } + if b.u.theme == "" { + b.u.theme = ThemeDefault + } + if b.passwordText != "" { + if err := b.u.SetPassword(b.passwordText); err != nil { + return nil, err + } + } + if err := b.u.UpdateEmail(b.email); err != nil { + return nil, err + } + return b.u, nil +} + +func (b *Builder) MustBuild() *User { + 
r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +func (b *Builder) ID(id ID) *Builder { + b.u.id = id + return b +} + +func (b *Builder) NewID() *Builder { + b.u.id = NewID() + return b +} + +func (b *Builder) Name(name string) *Builder { + b.u.name = name + return b +} + +func (b *Builder) Email(email string) *Builder { + b.email = email + return b +} + +func (b *Builder) EncodedPassword(p EncodedPassword) *Builder { + b.u.password = p.Clone() + return b +} + +func (b *Builder) PasswordPlainText(p string) *Builder { + b.passwordText = p + return b +} + +func (b *Builder) Team(team TeamID) *Builder { + b.u.team = team + return b +} + +func (b *Builder) Lang(lang language.Tag) *Builder { + b.u.lang = lang + return b +} + +func (b *Builder) Theme(t Theme) *Builder { + b.u.theme = t + return b +} + +func (b *Builder) LangFrom(lang string) *Builder { + if lang == "" { + b.u.lang = language.Tag{} + } else if l, err := language.Parse(lang); err == nil { + b.u.lang = l + } + return b +} + +func (b *Builder) Auths(auths []Auth) *Builder { + b.u.auths = append([]Auth{}, auths...) 
+ return b +} + +func (b *Builder) PasswordReset(pr *PasswordReset) *Builder { + b.u.passwordReset = pr + return b +} + +func (b *Builder) Verification(v *Verification) *Builder { + b.u.verification = v + return b +} diff --git a/server/pkg/user/builder_test.go b/server/pkg/user/builder_test.go new file mode 100644 index 000000000..de88a84b7 --- /dev/null +++ b/server/pkg/user/builder_test.go @@ -0,0 +1,288 @@ +package user + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + "golang.org/x/text/language" +) + +func TestBuilder_ID(t *testing.T) { + uid := NewID() + b := New().ID(uid).Email("aaa@bbb.com").MustBuild() + assert.Equal(t, uid, b.ID()) + assert.Nil(t, b.passwordReset) +} + +func TestBuilder_Name(t *testing.T) { + b := New().NewID().Name("xxx").Email("aaa@bbb.com").MustBuild() + assert.Equal(t, "xxx", b.Name()) +} + +func TestBuilder_NewID(t *testing.T) { + b := New().NewID().Email("aaa@bbb.com").MustBuild() + assert.NotNil(t, b.ID()) +} + +func TestBuilder_Team(t *testing.T) { + tid := NewTeamID() + b := New().NewID().Email("aaa@bbb.com").Team(tid).MustBuild() + assert.Equal(t, tid, b.Team()) +} + +func TestBuilder_Auths(t *testing.T) { + b := New().NewID().Email("aaa@bbb.com").Auths([]Auth{ + { + Provider: "xxx", + Sub: "aaa", + }, + }).MustBuild() + assert.Equal(t, []Auth{ + { + Provider: "xxx", + Sub: "aaa", + }, + }, b.Auths()) +} + +func TestBuilder_Email(t *testing.T) { + b := New().NewID().Email("xx@yy.zz").MustBuild() + assert.Equal(t, "xx@yy.zz", b.Email()) +} + +func TestBuilder_Lang(t *testing.T) { + l := language.Make("en") + b := New().NewID().Email("aaa@bbb.com").Lang(l).MustBuild() + assert.Equal(t, l, b.Lang()) +} + +func TestBuilder_LangFrom(t *testing.T) { + tests := []struct { + Name, Lang string + Expected language.Tag + }{ + { + Name: "success creating language", + Lang: "en", + Expected: language.Make("en"), + }, + { + Name: "empty language and empty tag", + Lang: "", + Expected: language.Tag{}, + }, + { + 
Name: "empty tag of parse err", + Lang: "xxxxxxxxxxx", + Expected: language.Tag{}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + b := New().NewID().Email("aaa@bbb.com").LangFrom(tc.Lang).MustBuild() + assert.Equal(t, tc.Expected, b.Lang()) + }) + } +} + +func TestNew(t *testing.T) { + b := New() + assert.NotNil(t, b) + assert.IsType(t, &Builder{}, b) +} + +func TestBuilder_Build(t *testing.T) { + // bcrypt is not suitable for unit tests as it requires heavy computation + DefaultPasswordEncoder = &NoopPasswordEncoder{} + + uid := NewID() + tid := NewTeamID() + pass := MustEncodedPassword("abcDEF0!") + + type args struct { + Name, Lang, Email string + ID ID + Team TeamID + Auths []Auth + PasswordBin []byte + } + + tests := []struct { + Name string + Args args + Expected *User + Err error + }{ + { + Name: "Success build user", + Args: args{ + Name: "xxx", + Email: "xx@yy.zz", + Lang: "en", + ID: uid, + Team: tid, + PasswordBin: pass, + Auths: []Auth{ + { + Provider: "ppp", + Sub: "sss", + }, + }, + }, + Expected: &User{ + id: uid, + team: tid, + email: "xx@yy.zz", + name: "xxx", + password: pass, + auths: []Auth{{Provider: "ppp", Sub: "sss"}}, + lang: language.English, + theme: ThemeDefault, + }, + }, { + Name: "failed invalid id", + Expected: nil, + Err: ErrInvalidID, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := New(). + ID(tt.Args.ID). + EncodedPassword(pass). + Name(tt.Args.Name). + Auths(tt.Args.Auths). + LangFrom(tt.Args.Lang). + Email(tt.Args.Email). + Team(tt.Args.Team). 
+ Build() + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) + } else { + assert.Equal(t, tt.Err, err) + } + }) + } +} + +func TestBuilder_MustBuild(t *testing.T) { + // bcrypt is not suitable for unit tests as it requires heavy computation + DefaultPasswordEncoder = &NoopPasswordEncoder{} + + uid := NewID() + tid := NewTeamID() + pass := MustEncodedPassword("abcDEF0!") + + type args struct { + Name, Lang, Email string + ID ID + Team TeamID + PasswordBin []byte + Auths []Auth + } + + tests := []struct { + Name string + Args args + Expected *User + Err error + }{ + { + Name: "Success build user", + Args: args{ + Name: "xxx", + Email: "xx@yy.zz", + Lang: "en", + ID: uid, + Team: tid, + PasswordBin: pass, + Auths: []Auth{ + { + Provider: "ppp", + Sub: "sss", + }, + }, + }, + Expected: &User{ + id: uid, + team: tid, + email: "xx@yy.zz", + name: "xxx", + password: pass, + auths: []Auth{{Provider: "ppp", Sub: "sss"}}, + lang: language.English, + theme: ThemeDefault, + }, + }, { + Name: "failed invalid id", + Err: ErrInvalidID, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + + build := func() *User { + t.Helper() + return New(). + ID(tt.Args.ID). + EncodedPassword(pass). + Name(tt.Args.Name). + Auths(tt.Args.Auths). + LangFrom(tt.Args.Lang). + Email(tt.Args.Email). + Team(tt.Args.Team). 
+ MustBuild() + } + + if tt.Err != nil { + assert.PanicsWithValue(t, tt.Err, func() { _ = build() }) + } else { + assert.Equal(t, tt.Expected, build()) + } + }) + } +} + +func TestBuilder_Verification(t *testing.T) { + tests := []struct { + name string + input *Verification + want *Builder + }{ + { + name: "should return verification", + input: &Verification{ + verified: true, + code: "xxx", + expiration: time.Time{}, + }, + + want: &Builder{ + u: &User{ + verification: &Verification{ + verified: true, + code: "xxx", + expiration: time.Time{}, + }, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + b := New() + b.Verification(tt.input) + assert.Equal(t, tt.want, b) + }) + } +} diff --git a/server/pkg/user/id.go b/server/pkg/user/id.go new file mode 100644 index 000000000..12968a973 --- /dev/null +++ b/server/pkg/user/id.go @@ -0,0 +1,22 @@ +package user + +import "github.com/reearth/reearth-backend/pkg/id" + +type ID = id.UserID +type TeamID = id.TeamID + +var NewID = id.NewUserID +var NewTeamID = id.NewTeamID + +var MustID = id.MustUserID +var MustTeamID = id.MustTeamID + +var IDFrom = id.UserIDFrom +var TeamIDFrom = id.TeamIDFrom + +var IDFromRef = id.UserIDFromRef +var TeamIDFromRef = id.TeamIDFromRef + +var ErrInvalidID = id.ErrInvalidID + +type TeamIDList = id.TeamIDList diff --git a/server/pkg/user/initializer.go b/server/pkg/user/initializer.go new file mode 100644 index 000000000..070b6f086 --- /dev/null +++ b/server/pkg/user/initializer.go @@ -0,0 +1,64 @@ +package user + +import ( + "golang.org/x/text/language" +) + +type InitParams struct { + Email string + Name string + Sub *Auth + Password *string + Lang *language.Tag + Theme *Theme + UserID *ID + TeamID *TeamID +} + +func Init(p InitParams) (*User, *Team, error) { + if p.UserID == nil { + p.UserID = NewID().Ref() + } + if p.TeamID == nil { + p.TeamID = NewTeamID().Ref() + } + if p.Lang == nil { + p.Lang = &language.Tag{} + } + if p.Theme == nil { + t := 
ThemeDefault + p.Theme = &t + } + if p.Sub == nil { + p.Sub = GenReearthSub(p.UserID.String()) + } + + b := New(). + ID(*p.UserID). + Name(p.Name). + Email(p.Email). + Auths([]Auth{*p.Sub}). + Lang(*p.Lang). + Theme(*p.Theme) + if p.Password != nil { + b = b.PasswordPlainText(*p.Password) + } + u, err := b.Build() + if err != nil { + return nil, nil, err + } + + // create a user's own team + t, err := NewTeam(). + ID(*p.TeamID). + Name(p.Name). + Members(map[ID]Role{u.ID(): RoleOwner}). + Personal(true). + Build() + if err != nil { + return nil, nil, err + } + u.UpdateTeam(t.ID()) + + return u, t, err +} diff --git a/server/pkg/user/initializer_test.go b/server/pkg/user/initializer_test.go new file mode 100644 index 000000000..f99c5e307 --- /dev/null +++ b/server/pkg/user/initializer_test.go @@ -0,0 +1,124 @@ +package user + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestInit(t *testing.T) { + uid := NewID() + tid := NewTeamID() + expectedSub := Auth{ + Provider: "###", + Sub: "###", + } + tests := []struct { + Name, Email, Username string + Sub Auth + UID *ID + TID *TeamID + ExpectedUser *User + ExpectedTeam *Team + Err error + }{ + { + Name: "Success create user", + Email: "xx@yy.zz", + Username: "nnn", + Sub: Auth{ + Provider: "###", + Sub: "###", + }, + UID: &uid, + TID: &tid, + ExpectedUser: New(). + ID(uid). + Email("xx@yy.zz"). + Name("nnn"). + Team(tid). + Auths([]Auth{expectedSub}). + MustBuild(), + ExpectedTeam: NewTeam(). + ID(tid). + Name("nnn"). + Members(map[ID]Role{uid: RoleOwner}). + Personal(true). + MustBuild(), + Err: nil, + }, + { + Name: "Success nil team id", + Email: "xx@yy.zz", + Username: "nnn", + Sub: Auth{ + Provider: "###", + Sub: "###", + }, + UID: &uid, + TID: nil, + ExpectedUser: New(). + ID(uid). + Email("xx@yy.zz"). + Name("nnn"). + Team(tid). + Auths([]Auth{expectedSub}). + MustBuild(), + ExpectedTeam: NewTeam(). + NewID(). + Name("nnn"). + Members(map[ID]Role{uid: RoleOwner}). + Personal(true). 
+ MustBuild(), + Err: nil, + }, + { + Name: "Success nil id", + Email: "xx@yy.zz", + Username: "nnn", + Sub: Auth{ + Provider: "###", + Sub: "###", + }, + UID: nil, + TID: &tid, + ExpectedUser: New(). + NewID(). + Email("xx@yy.zz"). + Name("nnn"). + Team(tid). + Auths([]Auth{expectedSub}). + MustBuild(), + ExpectedTeam: NewTeam(). + ID(tid). + Name("nnn"). + Members(map[ID]Role{uid: RoleOwner}). + Personal(true). + MustBuild(), + Err: nil, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + user, team, err := Init(InitParams{ + Email: tt.Email, + Name: tt.Username, + Sub: &tt.Sub, + UserID: tt.UID, + TeamID: tt.TID, + }) + if tt.Err == nil { + assert.Equal(t, tt.ExpectedUser.Email(), user.Email()) + assert.Equal(t, tt.ExpectedUser.Name(), user.Name()) + assert.Equal(t, tt.ExpectedUser.Auths(), user.Auths()) + + assert.Equal(t, tt.ExpectedTeam.Name(), team.Name()) + assert.Equal(t, tt.ExpectedTeam.IsPersonal(), team.IsPersonal()) + } else { + assert.Equal(t, tt.Err, err) + } + }) + } +} diff --git a/server/pkg/user/members.go b/server/pkg/user/members.go new file mode 100644 index 000000000..689818514 --- /dev/null +++ b/server/pkg/user/members.go @@ -0,0 +1,132 @@ +package user + +import ( + "errors" + "sort" +) + +var ( + ErrUserAlreadyJoined = errors.New("user already joined") + ErrCannotModifyPersonalTeam = errors.New("personal team cannot be modified") + ErrTeamWithProjects = errors.New("target team still has some project") + ErrTargetUserNotInTheTeam = errors.New("target user does not exist in the team") +) + +type Members struct { + members map[ID]Role + fixed bool +} + +func NewMembers() *Members { + m := &Members{members: map[ID]Role{}} + return m +} + +func NewFixedMembers(u ID) *Members { + m := &Members{members: map[ID]Role{u: RoleOwner}, fixed: true} + return m +} + +func NewMembersWith(members map[ID]Role) *Members { + m := &Members{members: map[ID]Role{}} + for k, v := range members { + m.members[k] 
= v + } + return m +} + +func CopyMembers(members *Members) *Members { + return NewMembersWith(members.members) +} + +func (m *Members) Members() map[ID]Role { + members := make(map[ID]Role) + for k, v := range m.members { + members[k] = v + } + return members +} + +func (m *Members) ContainsUser(u ID) bool { + for k := range m.members { + if k == u { + return true + } + } + return false +} + +func (m *Members) Count() int { + return len(m.members) +} + +func (m *Members) GetRole(u ID) Role { + return m.members[u] +} + +func (m *Members) UpdateRole(u ID, role Role) error { + if m.fixed { + return ErrCannotModifyPersonalTeam + } + if role == Role("") { + return nil + } + if _, ok := m.members[u]; ok { + m.members[u] = role + } else { + return ErrTargetUserNotInTheTeam + } + return nil +} + +func (m *Members) Join(u ID, role Role) error { + if m.fixed { + return ErrCannotModifyPersonalTeam + } + if _, ok := m.members[u]; ok { + return ErrUserAlreadyJoined + } + if role == Role("") { + role = RoleReader + } + m.members[u] = role + return nil +} + +func (m *Members) Leave(u ID) error { + if m.fixed { + return ErrCannotModifyPersonalTeam + } + if _, ok := m.members[u]; ok { + delete(m.members, u) + } else { + return ErrTargetUserNotInTheTeam + } + return nil +} + +func (m *Members) UsersByRole(role Role) []ID { + users := make([]ID, 0, len(m.members)) + for u, r := range m.members { + if r == role { + users = append(users, u) + } + } + + sort.SliceStable(users, func(a, b int) bool { + return users[a].Compare(users[b]) > 0 + }) + + return users +} + +func (m *Members) IsOnlyOwner(u ID) bool { + return len(m.UsersByRole(RoleOwner)) == 1 && m.members[u] == RoleOwner +} + +func (m *Members) Fixed() bool { + if m == nil { + return false + } + return m.fixed +} diff --git a/server/pkg/user/members_test.go b/server/pkg/user/members_test.go new file mode 100644 index 000000000..ffc29f717 --- /dev/null +++ b/server/pkg/user/members_test.go @@ -0,0 +1,303 @@ +package user + 
+import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewMembers(t *testing.T) { + m := NewMembers() + assert.NotNil(t, m) + assert.IsType(t, &Members{}, m) +} + +func TestNewMembersWith(t *testing.T) { + uid := NewID() + m := NewMembersWith(map[ID]Role{uid: RoleOwner}) + assert.NotNil(t, m) + assert.Equal(t, map[ID]Role{uid: RoleOwner}, m.Members()) +} + +func TestMembers_ContainsUser(t *testing.T) { + uid1 := NewID() + uid2 := NewID() + + tests := []struct { + Name string + M *Members + UID ID + Expected bool + }{ + { + Name: "existing user", + M: NewMembersWith(map[ID]Role{uid1: RoleOwner, uid2: RoleReader}), + UID: uid1, + Expected: true, + }, + { + Name: "not existing user", + M: NewMembersWith(map[ID]Role{uid2: RoleReader}), + UID: uid1, + Expected: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.M.ContainsUser(tt.UID) + assert.Equal(t, tt.Expected, res) + }) + } +} + +func TestCopyMembers(t *testing.T) { + uid := NewID() + m := NewMembersWith(map[ID]Role{uid: RoleOwner}) + m2 := CopyMembers(m) + assert.Equal(t, m, m2) +} + +func TestMembers_Count(t *testing.T) { + m := NewMembersWith(map[ID]Role{NewID(): RoleOwner}) + assert.Equal(t, len(m.Members()), m.Count()) +} + +func TestMembers_GetRole(t *testing.T) { + uid := NewID() + m := NewMembersWith(map[ID]Role{uid: RoleOwner}) + assert.Equal(t, RoleOwner, m.GetRole(uid)) +} + +func TestMembers_IsOnlyOwner(t *testing.T) { + uid := NewID() + m := NewMembersWith(map[ID]Role{uid: RoleOwner, NewID(): RoleReader}) + assert.True(t, m.IsOnlyOwner(uid)) +} + +func TestMembers_Leave(t *testing.T) { + uid := NewID() + + tests := []struct { + Name string + M *Members + UID ID + err error + }{ + { + Name: "success user left", + M: NewMembersWith(map[ID]Role{uid: RoleWriter, NewID(): RoleOwner}), + UID: uid, + err: nil, + }, + { + Name: "fail personal team", + M: NewFixedMembers(uid), + UID: uid, + err: 
ErrCannotModifyPersonalTeam, + }, + { + Name: "fail user not in the team", + M: NewMembersWith(map[ID]Role{uid: RoleWriter, NewID(): RoleOwner}), + UID: NewID(), + err: ErrTargetUserNotInTheTeam, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + err := tt.M.Leave(tt.UID) + if tt.err == nil { + assert.False(t, tt.M.ContainsUser(tt.UID)) + } else { + assert.Equal(t, tt.err, err) + } + }) + } +} + +func TestMembers_Members(t *testing.T) { + uid := NewID() + m := NewMembersWith(map[ID]Role{uid: RoleOwner}) + assert.Equal(t, map[ID]Role{uid: RoleOwner}, m.Members()) +} + +func TestMembers_UpdateRole(t *testing.T) { + uid := NewID() + + tests := []struct { + Name string + M *Members + UID ID + NewRole, Expected Role + err error + }{ + { + Name: "success role updated", + M: NewMembersWith(map[ID]Role{uid: RoleWriter}), + UID: uid, + NewRole: RoleOwner, + Expected: RoleOwner, + err: nil, + }, + { + Name: "nil role", + M: NewMembersWith(map[ID]Role{uid: RoleOwner}), + UID: uid, + NewRole: "", + Expected: RoleOwner, + err: nil, + }, + { + Name: "fail personal team", + M: NewFixedMembers(uid), + UID: uid, + NewRole: RoleOwner, + err: ErrCannotModifyPersonalTeam, + }, + { + Name: "fail user not in the team", + M: NewMembersWith(map[ID]Role{uid: RoleOwner}), + UID: NewID(), + NewRole: RoleOwner, + err: ErrTargetUserNotInTheTeam, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + err := tt.M.UpdateRole(tt.UID, tt.NewRole) + if tt.err == nil { + assert.Equal(t, tt.Expected, tt.M.GetRole(tt.UID)) + } else { + assert.Equal(t, tt.err, err) + } + }) + } +} + +func TestMembers_Join(t *testing.T) { + uid := NewID() + uid2 := NewID() + + tests := []struct { + Name string + M *Members + UID ID + JoinRole, ExpectedRole Role + err error + }{ + { + Name: "success join user", + M: NewMembersWith(map[ID]Role{uid: RoleWriter}), + UID: uid2, + JoinRole: "xxx", + ExpectedRole: "xxx", + 
err: nil, + }, + { + Name: "success join user", + M: NewMembersWith(map[ID]Role{uid: RoleWriter}), + UID: uid2, + JoinRole: "", + ExpectedRole: RoleReader, + err: nil, + }, + { + Name: "fail personal team", + M: NewFixedMembers(uid), + UID: uid2, + JoinRole: "xxx", + err: ErrCannotModifyPersonalTeam, + }, + { + Name: "fail user already joined", + M: NewMembersWith(map[ID]Role{uid: RoleOwner}), + UID: uid, + JoinRole: "", + err: ErrUserAlreadyJoined, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + err := tt.M.Join(tt.UID, tt.JoinRole) + if tt.err == nil { + assert.True(t, tt.M.ContainsUser(tt.UID)) + assert.Equal(t, tt.ExpectedRole, tt.M.GetRole(tt.UID)) + } else { + assert.Equal(t, tt.err, err) + } + }) + } +} + +func TestMembers_UsersByRole(t *testing.T) { + uid := NewID() + uid2 := NewID() + + tests := []struct { + Name string + M *Members + Role Role + Expected []ID + err error + }{ + { + Name: "success join user", + M: NewMembersWith(map[ID]Role{uid: "xxx", uid2: "xxx"}), + Role: "xxx", + Expected: []ID{uid2, uid}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.M.UsersByRole(tt.Role) + assert.Equal(t, tt.Expected, res) + }) + } +} + +func TestMembers_Fixed(t *testing.T) { + tests := []struct { + name string + target *Members + want bool + }{ + { + name: "true", + target: &Members{ + fixed: true, + }, + want: true, + }, + { + name: "empty", + target: &Members{}, + want: false, + }, + { + name: "nil", + want: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Fixed()) + }) + } +} diff --git a/server/pkg/user/password.go b/server/pkg/user/password.go new file mode 100644 index 000000000..6481ed204 --- /dev/null +++ b/server/pkg/user/password.go @@ -0,0 +1,123 @@ +package user + +import ( + "bytes" + "errors" + "unicode" + + 
"golang.org/x/crypto/bcrypt" +) + +var ( + DefaultPasswordEncoder PasswordEncoder = &BcryptPasswordEncoder{} + ErrEncodingPassword = errors.New("encoding password") + ErrInvalidPassword = errors.New("invalid password") + ErrPasswordLength = errors.New("password at least 8 characters") + ErrPasswordUpper = errors.New("password should have upper case letters") + ErrPasswordLower = errors.New("password should have lower case letters") + ErrPasswordNumber = errors.New("password should have numbers") +) + +type PasswordEncoder interface { + Encode(string) ([]byte, error) + Verify(string, []byte) (bool, error) +} + +type BcryptPasswordEncoder struct{} + +func (BcryptPasswordEncoder) Encode(pass string) ([]byte, error) { + return bcrypt.GenerateFromPassword([]byte(pass), 14) +} + +func (BcryptPasswordEncoder) Verify(s string, p []byte) (bool, error) { + err := bcrypt.CompareHashAndPassword(p, []byte(s)) + if err != nil { + if errors.Is(err, bcrypt.ErrMismatchedHashAndPassword) { + return false, nil + } + return false, err + } + return true, nil +} + +type NoopPasswordEncoder struct{} + +func (m NoopPasswordEncoder) Encode(pass string) ([]byte, error) { + return []byte(pass), nil +} + +func (m NoopPasswordEncoder) Verify(s string, p []byte) (bool, error) { + return bytes.Equal([]byte(s), []byte(p)), nil +} + +type MockPasswordEncoder struct{ Mock []byte } + +func (m MockPasswordEncoder) Encode(pass string) ([]byte, error) { + return append(m.Mock[:0:0], m.Mock...), nil +} + +func (m MockPasswordEncoder) Verify(s string, p []byte) (bool, error) { + return bytes.Equal(m.Mock, []byte(s)), nil +} + +type EncodedPassword []byte + +func NewEncodedPassword(pass string) (EncodedPassword, error) { + if err := ValidatePasswordFormat(pass); err != nil { + return nil, err + } + got, err := DefaultPasswordEncoder.Encode(pass) + if err != nil { + return nil, ErrEncodingPassword + } + return got, nil +} + +func MustEncodedPassword(pass string) EncodedPassword { + p, err := 
NewEncodedPassword(pass) + if err != nil { + panic(err) + } + return p +} + +func (p EncodedPassword) Clone() EncodedPassword { + if p == nil { + return nil + } + return append(p[:0:0], p...) +} + +func (p EncodedPassword) Verify(toVerify string) (bool, error) { + if len(toVerify) == 0 || len(p) == 0 { + return false, nil + } + return DefaultPasswordEncoder.Verify(toVerify, p) +} + +func ValidatePasswordFormat(pass string) error { + var hasNum, hasUpper, hasLower bool + for _, c := range pass { + switch { + case unicode.IsNumber(c): + hasNum = true + case unicode.IsUpper(c): + hasUpper = true + case unicode.IsLower(c) || c == ' ': + hasLower = true + } + } + if len(pass) < 8 { + return ErrPasswordLength + } + if !hasLower { + return ErrPasswordLower + } + if !hasUpper { + return ErrPasswordUpper + } + if !hasNum { + return ErrPasswordNumber + } + return nil +} diff --git a/server/pkg/user/password_reset.go b/server/pkg/user/password_reset.go new file mode 100644 index 000000000..6ec208723 --- /dev/null +++ b/server/pkg/user/password_reset.go @@ -0,0 +1,44 @@ +package user + +import ( + "time" + + "github.com/google/uuid" +) + +var timeNow = time.Now + +type PasswordReset struct { + Token string + CreatedAt time.Time +} + +func NewPasswordReset() *PasswordReset { + return &PasswordReset{ + Token: generateToken(), + CreatedAt: timeNow(), + } +} + +func PasswordResetFrom(token string, createdAt time.Time) *PasswordReset { + return &PasswordReset{ + Token: token, + CreatedAt: createdAt, + } +} + +func generateToken() string { + return uuid.New().String() +} + +func (pr *PasswordReset) Validate(token string) bool { + return pr != nil && pr.Token == token && pr.CreatedAt.Add(24*time.Hour).After(time.Now()) +} + +func (pr *PasswordReset) Clone() *PasswordReset { + if pr == nil { + return nil + } + pr2 := PasswordResetFrom(pr.Token, pr.CreatedAt) + return pr2 +} diff --git a/server/pkg/user/password_reset_test.go b/server/pkg/user/password_reset_test.go new file mode 
100644 index 000000000..253a7b92c --- /dev/null +++ b/server/pkg/user/password_reset_test.go @@ -0,0 +1,103 @@ +package user + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestNewPasswordReset(t *testing.T) { + mockTime := time.Now() + timeNow = func() time.Time { + return mockTime + } + pr := NewPasswordReset() + assert.NotNil(t, pr) + assert.NotEmpty(t, pr.Token) + assert.Equal(t, mockTime, pr.CreatedAt) +} + +func TestPasswordReset_Validate(t *testing.T) { + tests := []struct { + name string + pr *PasswordReset + token string + want bool + }{ + { + name: "valid", + pr: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Now(), + }, + token: "xyz", + want: true, + }, + { + name: "wrong token", + pr: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Now(), + }, + token: "xxx", + want: false, + }, + { + name: "old request", + pr: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Now().Add(-24 * time.Hour), + }, + token: "xyz", + want: false, + }, + { + name: "nil request", + pr: nil, + token: "xyz", + want: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.pr.Validate(tt.token)) + }) + } +} + +func Test_generateToken(t *testing.T) { + t1 := generateToken() + t2 := generateToken() + + assert.NotNil(t, t1) + assert.NotNil(t, t2) + assert.NotEmpty(t, t1) + assert.NotEmpty(t, t2) + assert.NotEqual(t, t1, t2) + +} + +func TestPasswordResetFrom(t *testing.T) { + tests := []struct { + name string + token string + createdAt time.Time + want *PasswordReset + }{ + { + name: "prFrom", + token: "xyz", + createdAt: time.Unix(1, 1), + want: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Unix(1, 1), + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, PasswordResetFrom(tt.token, tt.createdAt)) + }) + } +} diff --git a/server/pkg/user/password_test.go b/server/pkg/user/password_test.go new file mode 100644 index 
000000000..fb962df88 --- /dev/null +++ b/server/pkg/user/password_test.go @@ -0,0 +1,51 @@ +package user + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "golang.org/x/crypto/bcrypt" +) + +func TestBcryptPasswordEncoder(t *testing.T) { + got, err := (&BcryptPasswordEncoder{}).Encode("abc") + assert.NoError(t, err) + err = bcrypt.CompareHashAndPassword(got, []byte("abc")) + assert.NoError(t, err) + + ok, err := (&BcryptPasswordEncoder{}).Verify("abc", got) + assert.NoError(t, err) + assert.True(t, ok) + ok, err = (&BcryptPasswordEncoder{}).Verify("abcd", got) + assert.NoError(t, err) + assert.False(t, ok) +} + +func TestMockPasswordEncoder(t *testing.T) { + got, err := (&MockPasswordEncoder{Mock: []byte("ABC")}).Encode("ABC") + assert.NoError(t, err) + assert.Equal(t, got, []byte("ABC")) + got, err = (&MockPasswordEncoder{Mock: []byte("ABC")}).Encode("abc") + assert.NoError(t, err) + assert.Equal(t, got, []byte("ABC")) + + ok, err := (&MockPasswordEncoder{Mock: []byte("ABC")}).Verify("ABC", got) + assert.NoError(t, err) + assert.True(t, ok) + ok, err = (&MockPasswordEncoder{Mock: []byte("ABC")}).Verify("abc", got) + assert.NoError(t, err) + assert.False(t, ok) +} + +func TestNoopPasswordEncoder(t *testing.T) { + got, err := (&NoopPasswordEncoder{}).Encode("abc") + assert.NoError(t, err) + assert.Equal(t, got, []byte("abc")) + + ok, err := (&NoopPasswordEncoder{}).Verify("abc", got) + assert.NoError(t, err) + assert.True(t, ok) + ok, err = (&NoopPasswordEncoder{}).Verify("abcd", got) + assert.NoError(t, err) + assert.False(t, ok) +} diff --git a/server/pkg/user/role.go b/server/pkg/user/role.go new file mode 100644 index 000000000..272856142 --- /dev/null +++ b/server/pkg/user/role.go @@ -0,0 +1,59 @@ +package user + +import ( + "errors" + "strings" +) + +var ( + // RoleOwner is a role who can have full control of project + RoleOwner = Role("owner") + // RoleWriter is a role who can read and write project + RoleWriter = Role("writer") + // 
RoleReader is a role who can read project + RoleReader = Role("reader") + + roles = []Role{ + RoleOwner, + RoleWriter, + RoleReader, + } + + ErrInvalidRole = errors.New("invalid role") +) + +type Role string + +func checkRole(role Role) bool { + switch role { + case RoleOwner: + return true + case RoleWriter: + return true + case RoleReader: + return true + } + return false +} + +func RoleFromString(r string) (Role, error) { + role := Role(strings.ToLower(r)) + + if checkRole(role) { + return role, nil + } + return role, ErrInvalidRole +} + +func (r Role) Includes(role Role) bool { + for i, r2 := range roles { + if r == r2 { + for _, r3 := range roles[i:] { + if role == r3 { + return true + } + } + } + } + return false +} diff --git a/server/pkg/user/role_test.go b/server/pkg/user/role_test.go new file mode 100644 index 000000000..757a52c06 --- /dev/null +++ b/server/pkg/user/role_test.go @@ -0,0 +1,170 @@ +package user + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRoleFromString(t *testing.T) { + tests := []struct { + Name, Role string + Expected Role + Err error + }{ + { + Name: "Success reader", + Role: "reader", + Expected: Role("reader"), + Err: nil, + }, + { + Name: "fail invalid role", + Role: "xxx", + Expected: Role("xxx"), + Err: ErrInvalidRole, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := RoleFromString(tt.Role) + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) + } else { + assert.Equal(t, tt.Err, err) + } + }) + } +} + +func TestCheckRole(t *testing.T) { + tests := []struct { + Name string + Input Role + Expected bool + }{ + { + Name: "check reader", + Input: Role("reader"), + Expected: true, + }, + { + Name: "check writer", + Input: Role("writer"), + Expected: true, + }, + { + Name: "check owner", + Input: Role("owner"), + Expected: true, + }, + { + Name: "check unknown role", + Input: Role("xxx"), + Expected: false, + }, + } + + for _, tt 
:= range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := checkRole(tt.Input) + assert.Equal(t, tt.Expected, res) + }) + } +} + +func TestRole_Includes(t *testing.T) { + tests := []struct { + Name string + Target Role + Input Role + Expected bool + }{ + { + Name: "reader and readner", + Target: RoleReader, + Input: RoleReader, + Expected: true, + }, + { + Name: "reader and writer", + Target: RoleReader, + Input: RoleWriter, + Expected: false, + }, + { + Name: "reader and owner", + Target: RoleReader, + Input: RoleOwner, + Expected: false, + }, + { + Name: "writer and readner", + Target: RoleWriter, + Input: RoleReader, + Expected: true, + }, + { + Name: "writer and writer", + Target: RoleWriter, + Input: RoleWriter, + Expected: true, + }, + { + Name: "writer and owner", + Target: RoleWriter, + Input: RoleOwner, + Expected: false, + }, + { + Name: "owner and readner", + Target: RoleOwner, + Input: RoleReader, + Expected: true, + }, + { + Name: "owner and writer", + Target: RoleOwner, + Input: RoleWriter, + Expected: true, + }, + { + Name: "owner and owner", + Target: RoleOwner, + Input: RoleOwner, + Expected: true, + }, + { + Name: "unknown role", + Target: Role("xxx"), + Input: Role("yyy"), + Expected: false, + }, + { + Name: "unknown role 2", + Target: RoleOwner, + Input: Role("yyy"), + Expected: false, + }, + { + Name: "unknown role 3", + Target: Role("xxx"), + Input: RoleOwner, + Expected: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Target.Includes(tt.Input) + assert.Equal(t, tt.Expected, res) + }) + } +} diff --git a/server/pkg/user/team.go b/server/pkg/user/team.go new file mode 100644 index 000000000..2a1312bcc --- /dev/null +++ b/server/pkg/user/team.go @@ -0,0 +1,27 @@ +package user + +type Team struct { + id TeamID + name string + members *Members +} + +func (t *Team) ID() TeamID { + return t.id +} + +func (t *Team) Name() string { + return t.name 
+} + +func (t *Team) Members() *Members { + return t.members +} + +func (t *Team) IsPersonal() bool { + return t.members.Fixed() +} + +func (t *Team) Rename(name string) { + t.name = name +} diff --git a/server/pkg/user/team_builder.go b/server/pkg/user/team_builder.go new file mode 100644 index 000000000..ebb9e986f --- /dev/null +++ b/server/pkg/user/team_builder.go @@ -0,0 +1,57 @@ +package user + +type TeamBuilder struct { + t *Team + members map[ID]Role + personal bool +} + +func NewTeam() *TeamBuilder { + return &TeamBuilder{t: &Team{}} +} + +func (b *TeamBuilder) Build() (*Team, error) { + if b.t.id.IsNil() { + return nil, ErrInvalidID + } + if b.members == nil { + b.t.members = NewMembers() + } else { + b.t.members = NewMembersWith(b.members) + } + b.t.members.fixed = b.personal + return b.t, nil +} + +func (b *TeamBuilder) MustBuild() *Team { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +func (b *TeamBuilder) ID(id TeamID) *TeamBuilder { + b.t.id = id + return b +} + +func (b *TeamBuilder) NewID() *TeamBuilder { + b.t.id = NewTeamID() + return b +} + +func (b *TeamBuilder) Name(name string) *TeamBuilder { + b.t.name = name + return b +} + +func (b *TeamBuilder) Members(members map[ID]Role) *TeamBuilder { + b.members = members + return b +} + +func (b *TeamBuilder) Personal(p bool) *TeamBuilder { + b.personal = p + return b +} diff --git a/server/pkg/user/team_builder_test.go b/server/pkg/user/team_builder_test.go new file mode 100644 index 000000000..0c898fa04 --- /dev/null +++ b/server/pkg/user/team_builder_test.go @@ -0,0 +1,180 @@ +package user + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTeamBuilder_ID(t *testing.T) { + tid := NewTeamID() + tm := NewTeam().ID(tid).MustBuild() + assert.Equal(t, tid, tm.ID()) +} + +func TestTeamBuilder_Members(t *testing.T) { + m := map[ID]Role{NewID(): RoleOwner} + tm := NewTeam().NewID().Members(m).MustBuild() + assert.Equal(t, m, tm.Members().Members()) +} 
+ +func TestTeamBuilder_Personal(t *testing.T) { + tm := NewTeam().NewID().Personal(true).MustBuild() + assert.True(t, tm.IsPersonal()) +} + +func TestTeamBuilder_Name(t *testing.T) { + tm := NewTeam().NewID().Name("xxx").MustBuild() + assert.Equal(t, "xxx", tm.Name()) +} + +func TestTeamBuilder_NewID(t *testing.T) { + tm := NewTeam().NewID().MustBuild() + assert.NotNil(t, tm.ID()) +} + +func TestTeamBuilder_Build(t *testing.T) { + tid := NewTeamID() + uid := NewID() + + type args struct { + ID TeamID + Name string + Personal bool + Members map[ID]Role + } + + tests := []struct { + Name string + Args args + Expected *Team + Err error + }{ + { + Name: "success create team", + Args: args{ + ID: tid, + Name: "xxx", + Personal: true, + Members: map[ID]Role{uid: RoleOwner}, + }, + Expected: &Team{ + id: tid, + name: "xxx", + members: &Members{ + members: map[ID]Role{uid: RoleOwner}, + fixed: true, + }, + }, + }, { + Name: "success create team with nil members", + Args: args{ + ID: tid, + Name: "xxx", + }, + Expected: &Team{ + id: tid, + name: "xxx", + members: &Members{ + members: map[ID]Role{}, + fixed: false, + }, + }, + }, + { + Name: "fail invalid id", + Err: ErrInvalidID, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := NewTeam(). + ID(tt.Args.ID). + Members(tt.Args.Members). + Personal(tt.Args.Personal). + Name(tt.Args.Name). 
+ Build() + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) + } else { + assert.Equal(t, tt.Err, err) + } + }) + } +} + +func TestTeamBuilder_MustBuild(t *testing.T) { + tid := NewTeamID() + uid := NewID() + + type args struct { + ID TeamID + Name string + Personal bool + Members map[ID]Role + } + + tests := []struct { + Name string + Args args + Expected *Team + Err error + }{ + { + Name: "success create team", + Args: args{ + ID: tid, + Name: "xxx", + Personal: true, + Members: map[ID]Role{uid: RoleOwner}, + }, + Expected: &Team{ + id: tid, + name: "xxx", + members: &Members{ + members: map[ID]Role{uid: RoleOwner}, + fixed: true, + }, + }, + }, { + Name: "success create team with nil members", + Args: args{ + ID: tid, + Name: "xxx", + }, + Expected: &Team{ + id: tid, + name: "xxx", + members: &Members{ + members: map[ID]Role{}, + fixed: false, + }, + }, + }, + { + Name: "fail invalid id", + Err: ErrInvalidID, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + + build := func() *Team { + t.Helper() + return NewTeam().ID(tt.Args.ID).Members(tt.Args.Members).Personal(tt.Args.Personal).Name(tt.Args.Name).MustBuild() + } + + if tt.Err != nil { + assert.PanicsWithValue(t, tt.Err, func() { _ = build() }) + } else { + assert.Equal(t, tt.Expected, build()) + } + }) + } +} diff --git a/server/pkg/user/team_list.go b/server/pkg/user/team_list.go new file mode 100644 index 000000000..3a557982b --- /dev/null +++ b/server/pkg/user/team_list.go @@ -0,0 +1,65 @@ +package user + +type TeamList []*Team + +func (l TeamList) FilterByID(ids ...TeamID) TeamList { + if l == nil { + return nil + } + + res := make(TeamList, 0, len(l)) + for _, id := range ids { + var t2 *Team + for _, t := range l { + if t.ID() == id { + t2 = t + break + } + } + if t2 != nil { + res = append(res, t2) + } + } + return res +} +func (l TeamList) FilterByUserRole(u ID, r Role) TeamList { + if l == nil || u.IsNil() || r == "" { + return nil + } + + 
res := make(TeamList, 0, len(l)) + for _, t := range l { + tr := t.Members().GetRole(u) + if tr == r { + res = append(res, t) + } + } + return res +} + +func (l TeamList) FilterByUserRoleIncluding(u ID, r Role) TeamList { + if l == nil || u.IsNil() || r == "" { + return nil + } + + res := make(TeamList, 0, len(l)) + for _, t := range l { + tr := t.Members().GetRole(u) + if tr.Includes(r) { + res = append(res, t) + } + } + return res +} + +func (l TeamList) IDs() []TeamID { + if l == nil { + return nil + } + + res := make([]TeamID, 0, len(l)) + for _, t := range l { + res = append(res, t.ID()) + } + return res +} diff --git a/server/pkg/user/team_list_test.go b/server/pkg/user/team_list_test.go new file mode 100644 index 000000000..043493f58 --- /dev/null +++ b/server/pkg/user/team_list_test.go @@ -0,0 +1,84 @@ +package user + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTeamList_FilterByID(t *testing.T) { + tid1 := NewTeamID() + tid2 := NewTeamID() + t1 := &Team{id: tid1} + t2 := &Team{id: tid2} + + assert.Equal(t, TeamList{t1}, TeamList{t1, t2}.FilterByID(tid1)) + assert.Equal(t, TeamList{t2}, TeamList{t1, t2}.FilterByID(tid2)) + assert.Equal(t, TeamList{t1, t2}, TeamList{t1, t2}.FilterByID(tid1, tid2)) + assert.Equal(t, TeamList{}, TeamList{t1, t2}.FilterByID(NewTeamID())) + assert.Equal(t, TeamList(nil), TeamList(nil).FilterByID(tid1)) +} + +func TestTeamList_FilterByUserRole(t *testing.T) { + uid := NewID() + tid1 := NewTeamID() + tid2 := NewTeamID() + t1 := &Team{ + id: tid1, + members: &Members{ + members: map[ID]Role{ + uid: RoleReader, + }, + }, + } + t2 := &Team{ + id: tid2, + members: &Members{ + members: map[ID]Role{ + uid: RoleOwner, + }, + }, + } + + assert.Equal(t, TeamList{t1}, TeamList{t1, t2}.FilterByUserRole(uid, RoleReader)) + assert.Equal(t, TeamList{}, TeamList{t1, t2}.FilterByUserRole(uid, RoleWriter)) + assert.Equal(t, TeamList{t2}, TeamList{t1, t2}.FilterByUserRole(uid, RoleOwner)) + assert.Equal(t, 
TeamList(nil), TeamList(nil).FilterByUserRole(uid, RoleOwner)) +} + +func TestTeamList_FilterByUserRoleIncluding(t *testing.T) { + uid := NewID() + tid1 := NewTeamID() + tid2 := NewTeamID() + t1 := &Team{ + id: tid1, + members: &Members{ + members: map[ID]Role{ + uid: RoleReader, + }, + }, + } + t2 := &Team{ + id: tid2, + members: &Members{ + members: map[ID]Role{ + uid: RoleOwner, + }, + }, + } + + assert.Equal(t, TeamList{t1, t2}, TeamList{t1, t2}.FilterByUserRoleIncluding(uid, RoleReader)) + assert.Equal(t, TeamList{t2}, TeamList{t1, t2}.FilterByUserRoleIncluding(uid, RoleWriter)) + assert.Equal(t, TeamList{t2}, TeamList{t1, t2}.FilterByUserRoleIncluding(uid, RoleOwner)) + assert.Equal(t, TeamList(nil), TeamList(nil).FilterByUserRoleIncluding(uid, RoleOwner)) +} +func TestTeamList_IDs(t *testing.T) { + tid1 := NewTeamID() + tid2 := NewTeamID() + t1 := &Team{id: tid1} + t2 := &Team{id: tid2} + + assert.Equal(t, []TeamID{tid1, tid2}, TeamList{t1, t2}.IDs()) + assert.Equal(t, []TeamID{}, TeamList{}.IDs()) + assert.Equal(t, []TeamID(nil), TeamList(nil).IDs()) +} diff --git a/server/pkg/user/team_test.go b/server/pkg/user/team_test.go new file mode 100644 index 000000000..3f0ea578e --- /dev/null +++ b/server/pkg/user/team_test.go @@ -0,0 +1,37 @@ +package user + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTeam_ID(t *testing.T) { + tid := NewTeamID() + tm := NewTeam().ID(tid).MustBuild() + assert.Equal(t, tid, tm.ID()) +} + +func TestTeam_Name(t *testing.T) { + tm := NewTeam().NewID().Name("ttt").MustBuild() + assert.Equal(t, "ttt", tm.Name()) +} + +func TestTeam_Members(t *testing.T) { + m := map[ID]Role{ + NewID(): RoleOwner, + } + tm := NewTeam().NewID().Members(m).MustBuild() + assert.Equal(t, m, tm.Members().Members()) +} + +func TestTeam_IsPersonal(t *testing.T) { + tm := NewTeam().NewID().Personal(true).MustBuild() + assert.Equal(t, true, tm.IsPersonal()) +} + +func TestTeam_Rename(t *testing.T) { + tm := 
NewTeam().NewID().Name("ttt").MustBuild() + tm.Rename("ccc") + assert.Equal(t, "ccc", tm.Name()) +} diff --git a/server/pkg/user/theme.go b/server/pkg/user/theme.go new file mode 100644 index 000000000..0fa5a92c8 --- /dev/null +++ b/server/pkg/user/theme.go @@ -0,0 +1,13 @@ +package user + +type Theme string + +const ( + ThemeDefault Theme = "default" + ThemeLight Theme = "light" + ThemeDark Theme = "dark" +) + +func (t Theme) Ref() *Theme { + return &t +} diff --git a/server/pkg/user/user.go b/server/pkg/user/user.go new file mode 100644 index 000000000..177f16725 --- /dev/null +++ b/server/pkg/user/user.go @@ -0,0 +1,193 @@ +package user + +import ( + "errors" + "net/mail" + + "golang.org/x/text/language" +) + +var ( + ErrInvalidEmail = errors.New("invalid email") +) + +type User struct { + id ID + name string + email string + password EncodedPassword + team TeamID + auths []Auth + lang language.Tag + theme Theme + verification *Verification + passwordReset *PasswordReset +} + +func (u *User) ID() ID { + return u.id +} + +func (u *User) Name() string { + return u.name +} + +func (u *User) Email() string { + return u.email +} + +func (u *User) Team() TeamID { + return u.team +} + +func (u *User) Lang() language.Tag { + return u.lang +} + +func (u *User) Theme() Theme { + return u.theme +} + +func (u *User) Password() []byte { + return u.password +} + +func (u *User) UpdateName(name string) { + u.name = name +} + +func (u *User) UpdateEmail(email string) error { + if _, err := mail.ParseAddress(email); err != nil { + return ErrInvalidEmail + } + u.email = email + return nil +} + +func (u *User) UpdateTeam(team TeamID) { + u.team = team +} + +func (u *User) UpdateLang(lang language.Tag) { + u.lang = lang +} + +func (u *User) UpdateTheme(t Theme) { + u.theme = t +} + +func (u *User) Verification() *Verification { + return u.verification +} + +func (u *User) Auths() []Auth { + if u == nil { + return nil + } + return append([]Auth{}, u.auths...) 
+} + +func (u *User) ContainAuth(a Auth) bool { + if u == nil { + return false + } + for _, b := range u.auths { + if a == b || a.Provider == b.Provider { + return true + } + } + return false +} + +func (u *User) HasAuthProvider(p string) bool { + if u == nil { + return false + } + for _, b := range u.auths { + if b.Provider == p { + return true + } + } + return false +} + +func (u *User) AddAuth(a Auth) bool { + if u == nil { + return false + } + if !u.ContainAuth(a) { + u.auths = append(u.auths, a) + return true + } + return false +} + +func (u *User) RemoveAuth(a Auth) bool { + if u == nil || a.IsAuth0() { + return false + } + for i, b := range u.auths { + if a == b { + u.auths = append(u.auths[:i], u.auths[i+1:]...) + return true + } + } + return false +} + +func (u *User) GetAuthByProvider(provider string) *Auth { + if u == nil || u.auths == nil { + return nil + } + for _, b := range u.auths { + if provider == b.Provider { + return &b + } + } + return nil +} + +func (u *User) RemoveAuthByProvider(provider string) bool { + if u == nil || provider == "auth0" { + return false + } + for i, b := range u.auths { + if provider == b.Provider { + u.auths = append(u.auths[:i], u.auths[i+1:]...) 
+ return true + } + } + return false +} + +func (u *User) ClearAuths() { + u.auths = []Auth{} +} + +func (u *User) SetPassword(pass string) error { + p, err := NewEncodedPassword(pass) + if err != nil { + return err + } + u.password = p + return nil +} + +func (u *User) MatchPassword(pass string) (bool, error) { + if u == nil { + return false, nil + } + return u.password.Verify(pass) +} + +func (u *User) PasswordReset() *PasswordReset { + return u.passwordReset +} + +func (u *User) SetPasswordReset(pr *PasswordReset) { + u.passwordReset = pr.Clone() +} + +func (u *User) SetVerification(v *Verification) { + u.verification = v +} diff --git a/server/pkg/user/user_test.go b/server/pkg/user/user_test.go new file mode 100644 index 000000000..39d37d980 --- /dev/null +++ b/server/pkg/user/user_test.go @@ -0,0 +1,637 @@ +package user + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + "golang.org/x/text/language" +) + +func TestUser(t *testing.T) { + uid := NewID() + tid := NewTeamID() + + tests := []struct { + Name string + User *User + Expected struct { + Id ID + Name string + Email string + Team TeamID + Auths []Auth + Lang language.Tag + } + }{ + { + Name: "create user", + User: New().ID(uid). + Team(tid). + Name("xxx"). + LangFrom("en"). + Email("ff@xx.zz"). 
+ Auths([]Auth{{ + Provider: "aaa", + Sub: "sss", + }}).MustBuild(), + Expected: struct { + Id ID + Name string + Email string + Team TeamID + Auths []Auth + Lang language.Tag + }{ + Id: uid, + Name: "xxx", + Email: "ff@xx.zz", + Team: tid, + Auths: []Auth{{ + Provider: "aaa", + Sub: "sss", + }}, + Lang: language.Make("en"), + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected.Id, tc.User.ID()) + assert.Equal(t, tc.Expected.Name, tc.User.Name()) + assert.Equal(t, tc.Expected.Team, tc.User.Team()) + assert.Equal(t, tc.Expected.Auths, tc.User.Auths()) + assert.Equal(t, tc.Expected.Email, tc.User.Email()) + assert.Equal(t, tc.Expected.Lang, tc.User.Lang()) + }) + } +} + +func TestUser_AddAuth(t *testing.T) { + tests := []struct { + Name string + User *User + A Auth + Expected bool + }{ + { + Name: "nil user", + User: nil, + Expected: false, + }, + { + Name: "add new auth", + User: New().NewID().Email("aaa@bbb.com").MustBuild(), + A: Auth{ + Provider: "xxx", + Sub: "zzz", + }, + Expected: true, + }, + { + Name: "existing auth", + User: New().NewID().Email("aaa@bbb.com").Auths([]Auth{{ + Provider: "xxx", + Sub: "zzz", + }}).MustBuild(), + A: Auth{ + Provider: "xxx", + Sub: "zzz", + }, + Expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.User.AddAuth(tc.A) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestUser_RemoveAuth(t *testing.T) { + tests := []struct { + Name string + User *User + A Auth + Expected bool + }{ + { + Name: "nil user", + User: nil, + Expected: false, + }, + { + Name: "remove auth0", + User: New().NewID().Email("aaa@bbb.com").MustBuild(), + A: Auth{ + Provider: "auth0", + Sub: "zzz", + }, + Expected: false, + }, + { + Name: "existing auth", + User: New().NewID().Email("aaa@bbb.com").Auths([]Auth{{ + Provider: "xxx", + Sub: "zzz", + }}).MustBuild(), + A: Auth{ + Provider: "xxx", 
+ Sub: "zzz", + }, + Expected: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.User.RemoveAuth(tc.A) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestUser_ContainAuth(t *testing.T) { + tests := []struct { + Name string + User *User + A Auth + Expected bool + }{ + { + Name: "nil user", + User: nil, + Expected: false, + }, + { + Name: "not existing auth", + User: New().NewID().Email("aaa@bbb.com").MustBuild(), + A: Auth{ + Provider: "auth0", + Sub: "zzz", + }, + Expected: false, + }, + { + Name: "existing auth", + User: New().NewID().Email("aaa@bbb.com").Auths([]Auth{{ + Provider: "xxx", + Sub: "zzz", + }}).MustBuild(), + A: Auth{ + Provider: "xxx", + Sub: "zzz", + }, + Expected: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.User.ContainAuth(tc.A) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestUser_HasAuthProvider(t *testing.T) { + tests := []struct { + Name string + User *User + P string + Expected bool + }{ + { + Name: "nil user", + User: nil, + Expected: false, + }, + { + Name: "not existing auth", + User: New().NewID().Email("aaa@bbb.com").MustBuild(), + P: "auth0", + Expected: false, + }, + { + Name: "existing auth", + User: New().NewID().Email("aaa@bbb.com").Auths([]Auth{{ + Provider: "xxx", + Sub: "zzz", + }}).MustBuild(), + P: "xxx", + Expected: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.User.HasAuthProvider(tc.P) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestUser_RemoveAuthByProvider(t *testing.T) { + tests := []struct { + Name string + User *User + Provider string + Expected bool + }{ + { + Name: "nil user", + User: nil, + Expected: false, + }, + { + Name: "remove auth0", + User: New().NewID().Email("aaa@bbb.com").MustBuild(), + Provider: "auth0", + Expected: false, + }, + { + Name: "existing 
auth", + User: New().NewID().Email("aaa@bbb.com").Auths([]Auth{{ + Provider: "xxx", + Sub: "zzz", + }}).MustBuild(), + Provider: "xxx", + Expected: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.User.RemoveAuthByProvider(tc.Provider) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestUser_ClearAuths(t *testing.T) { + u := New().NewID().Email("aaa@bbb.com").Auths([]Auth{{ + Provider: "xxx", + Sub: "zzz", + }}).MustBuild() + u.ClearAuths() + assert.Equal(t, 0, len(u.Auths())) +} + +func TestUser_Auths(t *testing.T) { + var u *User + assert.Equal(t, []Auth(nil), u.Auths()) +} + +func TestUser_UpdateEmail(t *testing.T) { + u := New().NewID().Email("abc@abc.com").MustBuild() + assert.NoError(t, u.UpdateEmail("abc@xyz.com")) + assert.Equal(t, "abc@xyz.com", u.Email()) + assert.Error(t, u.UpdateEmail("abcxyz")) +} + +func TestUser_UpdateLang(t *testing.T) { + u := New().NewID().Email("aaa@bbb.com").MustBuild() + u.UpdateLang(language.Make("en")) + assert.Equal(t, language.Make("en"), u.Lang()) +} + +func TestUser_UpdateTeam(t *testing.T) { + tid := NewTeamID() + u := New().NewID().Email("aaa@bbb.com").MustBuild() + u.UpdateTeam(tid) + assert.Equal(t, tid, u.Team()) +} + +func TestUser_UpdateName(t *testing.T) { + u := New().NewID().Email("aaa@bbb.com").MustBuild() + u.UpdateName("xxx") + assert.Equal(t, "xxx", u.Name()) +} + +func TestUser_GetAuthByProvider(t *testing.T) { + testCases := []struct { + Name string + User *User + Provider string + Expected *Auth + }{ + { + Name: "existing auth", + User: New().NewID().Email("aaa@bbb.com").Auths([]Auth{{ + Provider: "xxx", + Sub: "zzz", + }}).MustBuild(), + Provider: "xxx", + Expected: &Auth{ + Provider: "xxx", + Sub: "zzz", + }, + }, + { + Name: "not existing auth", + User: New().NewID().Email("aaa@bbb.com").Auths([]Auth{{ + Provider: "xxx", + Sub: "zzz", + }}).MustBuild(), + Provider: "yyy", + Expected: nil, + }, + } + for _, tc := range 
testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.User.GetAuthByProvider(tc.Provider) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestUser_MatchPassword(t *testing.T) { + // bcrypt is not suitable for unit tests as it requires heavy computation + DefaultPasswordEncoder = &NoopPasswordEncoder{} + + password := MustEncodedPassword("abcDEF0!") + + type args struct { + pass string + } + + tests := []struct { + name string + password []byte + args args + want bool + wantErr bool + }{ + { + name: "should match", + password: password, + args: args{ + pass: "abcDEF0!", + }, + want: true, + wantErr: false, + }, + { + name: "should not match", + password: password, + args: args{ + pass: "xxx", + }, + want: false, + wantErr: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + u := &User{ + password: tc.password, + } + got, err := u.MatchPassword(tc.args.pass) + assert.Equal(tt, tc.want, got) + if tc.wantErr { + assert.Error(tt, err) + } else { + assert.NoError(tt, err) + } + }) + } +} + +func TestUser_SetPassword(t *testing.T) { + // bcrypt is not suitable for unit tests as it requires heavy computation + DefaultPasswordEncoder = &NoopPasswordEncoder{} + + type args struct { + pass string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "should set non-latin characters password", + args: args{ + pass: "ร€รชรฎรดรปtest1", + }, + want: "ร€รชรฎรดรปtest1", + }, + { + name: "should set latin characters password", + args: args{ + pass: "Testabc1", + }, + want: "Testabc1", + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + u := &User{} + _ = u.SetPassword(tc.args.pass) + got, err := u.password.Verify(tc.want) + assert.NoError(tt, err) + assert.True(tt, got) + }) + } +} + +func TestUser_PasswordReset(t *testing.T) { + testCases := []struct { + Name string + User *User + Expected *PasswordReset + }{ + { + Name: 
"not password request", + User: New().NewID().Email("aaa@bbb.com").MustBuild(), + Expected: nil, + }, + { + Name: "create new password request over existing one", + User: New().NewID().Email("aaa@bbb.com").PasswordReset(&PasswordReset{"xzy", time.Unix(0, 0)}).MustBuild(), + Expected: &PasswordReset{ + Token: "xzy", + CreatedAt: time.Unix(0, 0), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected, tc.User.PasswordReset()) + }) + } +} + +func TestUser_SetPasswordReset(t *testing.T) { + tests := []struct { + Name string + User *User + Pr *PasswordReset + Expected *PasswordReset + }{ + { + Name: "nil", + User: New().NewID().Email("aaa@bbb.com").MustBuild(), + Pr: nil, + Expected: nil, + }, + { + Name: "nil", + User: New().NewID().Email("aaa@bbb.com").MustBuild(), + Pr: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Unix(1, 1), + }, + Expected: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Unix(1, 1), + }, + }, + { + Name: "create new password request", + User: New().NewID().Email("aaa@bbb.com").MustBuild(), + Pr: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Unix(1, 1), + }, + Expected: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Unix(1, 1), + }, + }, + { + Name: "create new password request over existing one", + User: New().NewID().Email("aaa@bbb.com").PasswordReset(&PasswordReset{"xzy", time.Now()}).MustBuild(), + Pr: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Unix(1, 1), + }, + Expected: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Unix(1, 1), + }, + }, + { + Name: "remove none existing password request", + User: New().NewID().Email("aaa@bbb.com").MustBuild(), + Pr: nil, + Expected: nil, + }, + { + Name: "remove existing password request", + User: New().NewID().Email("aaa@bbb.com").PasswordReset(&PasswordReset{"xzy", time.Now()}).MustBuild(), + Pr: nil, + Expected: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t 
*testing.T) { + tt.User.SetPasswordReset(tt.Pr) + assert.Equal(t, tt.Expected, tt.User.PasswordReset()) + }) + } +} + +func TestUser_SetVerification(t *testing.T) { + input := &User{} + v := &Verification{ + verified: false, + code: "xxx", + expiration: time.Time{}, + } + input.SetVerification(v) + assert.Equal(t, v, input.verification) +} + +func TestUser_Verification(t *testing.T) { + v := NewVerification() + tests := []struct { + name string + verification *Verification + want *Verification + }{ + { + name: "should return the same verification", + verification: v, + want: v, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + u := &User{ + verification: tt.verification, + } + assert.Equal(t, tt.want, u.Verification()) + }) + } +} + +func Test_ValidatePassword(t *testing.T) { + tests := []struct { + name string + pass string + wantErr bool + }{ + { + name: "should pass", + pass: "Abcdafgh1", + wantErr: false, + }, + { + name: "shouldn't pass: length<8", + pass: "Aafgh1", + wantErr: true, + }, + { + name: "shouldn't pass: don't have numbers", + pass: "Abcdefghi", + wantErr: true, + }, + { + name: "shouldn't pass: don't have upper", + pass: "abcdefghi1", + wantErr: true, + }, + { + name: "shouldn't pass: don't have lower", + pass: "ABCDEFGHI1", + wantErr: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + out := ValidatePasswordFormat(tc.pass) + assert.Equal(tt, out != nil, tc.wantErr) + }) + } +} diff --git a/server/pkg/user/verification.go b/server/pkg/user/verification.go new file mode 100644 index 000000000..9ed458cb8 --- /dev/null +++ b/server/pkg/user/verification.go @@ -0,0 +1,82 @@ +package user + +import ( + "time" + + uuid "github.com/google/uuid" +) + +var Now = time.Now +var GenerateVerificationCode = generateCode + +func MockNow(t time.Time) func() { + Now = func() time.Time { return t } + return func() { Now = time.Now } +} + +func MockGenerateVerificationCode(code 
string) func() { + GenerateVerificationCode = func() string { return code } + return func() { GenerateVerificationCode = generateCode } +} + +func NewVerification() *Verification { + return &Verification{ + verified: false, + code: GenerateVerificationCode(), + expiration: Now().Add(time.Hour * 24), + } +} + +func VerificationFrom(c string, e time.Time, b bool) *Verification { + return &Verification{ + verified: b, + code: c, + expiration: e, + } +} + +type Verification struct { + verified bool + code string + expiration time.Time +} + +func (v *Verification) IsVerified() bool { + if v == nil { + return false + } + return v.verified +} + +func (v *Verification) Code() string { + if v == nil { + return "" + } + return v.code +} + +func (v *Verification) Expiration() time.Time { + if v == nil { + return time.Time{} + } + return v.expiration +} + +func generateCode() string { + return uuid.NewString() +} + +func (v *Verification) IsExpired() bool { + if v == nil { + return true + } + now := time.Now() + return now.After(v.expiration) +} + +func (v *Verification) SetVerified(b bool) { + if v == nil { + return + } + v.verified = b +} diff --git a/server/pkg/user/verification_test.go b/server/pkg/user/verification_test.go new file mode 100644 index 000000000..342c5937b --- /dev/null +++ b/server/pkg/user/verification_test.go @@ -0,0 +1,215 @@ +package user + +import ( + "testing" + "time" + + "github.com/google/uuid" + + "github.com/stretchr/testify/assert" +) + +func TestNewVerification(t *testing.T) { + type fields struct { + verified bool + code bool + expiration bool + } + + tests := []struct { + name string + want fields + }{ + { + name: "init verification struct", + + want: fields{ + verified: false, + code: true, + expiration: true, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := NewVerification() + assert.Equal(t, tt.want.verified, got.IsVerified()) + assert.Equal(t, tt.want.code, len(got.Code()) > 0) + assert.Equal(t, 
tt.want.expiration, !got.Expiration().IsZero()) + }) + } +} + +func TestVerification_Code(t *testing.T) { + tests := []struct { + name string + verification *Verification + want string + }{ + { + name: "should return a code string", + verification: &Verification{ + verified: false, + code: "xxx", + expiration: time.Time{}, + }, + want: "xxx", + }, + { + name: "should return a empty string", + want: "", + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.want, tc.verification.Code()) + }) + } +} + +func TestVerification_Expiration(t *testing.T) { + e := time.Now() + + tests := []struct { + name string + verification *Verification + want time.Time + }{ + { + name: "should return now date", + verification: &Verification{ + verified: false, + code: "", + expiration: e, + }, + want: e, + }, + { + name: "should return zero time", + verification: nil, + want: time.Time{}, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.want, tc.verification.Expiration()) + }) + } +} + +func TestVerification_IsExpired(t *testing.T) { + tim, _ := time.Parse(time.RFC3339, "2021-03-16T04:19:57.592Z") + tim2 := time.Now().Add(time.Hour * 24) + + type fields struct { + verified bool + code string + expiration time.Time + } + tests := []struct { + name string + fields fields + want bool + }{ + { + name: "should be expired", + fields: fields{ + verified: false, + code: "xxx", + expiration: tim, + }, + want: true, + }, + { + name: "shouldn't be expired", + fields: fields{ + verified: false, + code: "xxx", + expiration: tim2, + }, + want: false, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + v := &Verification{ + verified: tc.fields.verified, + code: tc.fields.code, + expiration: tc.fields.expiration, + } + assert.Equal(tt, tc.want, v.IsExpired()) + }) + } +} + +func 
TestVerification_IsVerified(t *testing.T) { + tests := []struct { + name string + verification *Verification + want bool + }{ + { + name: "should return true", + verification: &Verification{ + verified: true, + }, + want: true, + }, + { + name: "should return false", + verification: nil, + want: false, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.want, tc.verification.IsVerified()) + }) + } +} + +func TestVerification_SetVerified(t *testing.T) { + tests := []struct { + name string + verification *Verification + input bool + want bool + }{ + { + name: "should set true", + verification: &Verification{ + verified: false, + }, + input: true, + want: true, + }, + { + name: "should return false", + verification: nil, + want: false, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + tc.verification.SetVerified(tc.input) + assert.Equal(tt, tc.want, tc.verification.IsVerified()) + }) + } +} + +func Test_generateCode(t *testing.T) { + str := generateCode() + _, err := uuid.Parse(str) + assert.NoError(t, err) +} diff --git a/server/pkg/util/list.go b/server/pkg/util/list.go new file mode 100644 index 000000000..d6d6df0e7 --- /dev/null +++ b/server/pkg/util/list.go @@ -0,0 +1,130 @@ +package util + +import ( + "github.com/samber/lo" + "golang.org/x/exp/slices" +) + +type List[T comparable] []T + +func (l List[T]) Has(elements ...T) bool { + return Any(elements, func(e T) bool { + return slices.Contains(l, e) + }) +} + +func (l List[T]) At(i int) *T { + if len(l) == 0 || i < 0 || len(l) <= i { + return nil + } + e := l[i] + return &e +} + +func (l List[T]) Index(e T) int { + return slices.Index(l, e) +} + +func (l List[T]) Len() int { + return len(l) +} + +func (l List[T]) Copy() List[T] { + if l == nil { + return nil + } + return slices.Clone(l) +} + +func (l List[T]) Ref() *List[T] { + if l == nil { + return nil + } + return &l +} + +func (l 
List[T]) Refs() []*T { + return Map(l, func(e T) *T { + return &e + }) +} + +func (l List[T]) Delete(elements ...T) List[T] { + if l == nil { + return nil + } + m := l.Copy() + for _, e := range elements { + if j := l.Index(e); j >= 0 { + m = slices.Delete[[]T](m, j, j+1) + } + } + return m +} + +func (l List[T]) DeleteAt(i int) List[T] { + if l == nil { + return nil + } + m := l.Copy() + return slices.Delete(m, i, i+1) +} + +func (l List[T]) Add(elements ...T) List[T] { + res := l.Copy() + for _, e := range elements { + res = append(res, e) + } + return res +} + +func (l List[T]) AddUniq(elements ...T) List[T] { + res := append(List[T]{}, l...) + for _, id := range elements { + if !res.Has(id) { + res = append(res, id) + } + } + return res +} + +func (l List[T]) Insert(i int, elements ...T) List[T] { + if i < 0 || len(l) < i { + return l.Add(elements...) + } + return slices.Insert(l, i, elements...) +} + +func (l List[T]) Move(e T, to int) List[T] { + return l.MoveAt(l.Index(e), to) +} + +func (l List[T]) MoveAt(from, to int) List[T] { + if from < 0 || from == to || len(l) <= from { + return l.Copy() + } + e := l[from] + if from < to { + to-- + } + m := l.DeleteAt(from) + if to < 0 { + return m + } + return m.Insert(to, e) +} + +func (l List[T]) Reverse() List[T] { + return lo.Reverse(l.Copy()) +} + +func (l List[T]) Concat(m []T) List[T] { + return append(l, m...) 
+} + +func (l List[T]) Intersect(m []T) List[T] { + if l == nil { + return nil + } + return lo.Intersect(m, l) +} diff --git a/server/pkg/util/list_test.go b/server/pkg/util/list_test.go new file mode 100644 index 000000000..e54db50d4 --- /dev/null +++ b/server/pkg/util/list_test.go @@ -0,0 +1,168 @@ +package util + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +type T struct{} + +func TestList_Has(t *testing.T) { + l := List[int]{1, 2} + + assert.True(t, l.Has(1)) + assert.True(t, l.Has(1, 3)) + assert.False(t, l.Has(3)) + assert.False(t, List[int](nil).Has(1)) +} + +func TestList_At(t *testing.T) { + a := T{} + b := T{} + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).At(0)) + assert.Nil(t, l.At(-1)) + assert.Equal(t, &a, l.At(0)) + assert.Equal(t, &b, l.At(1)) + assert.Nil(t, l.At(2)) +} + +func TestList_Index(t *testing.T) { + l := List[string]{"a", "b"} + + assert.Equal(t, -1, List[string](nil).Index("a")) + assert.Equal(t, 0, l.Index("a")) + assert.Equal(t, 1, l.Index("b")) + assert.Equal(t, -1, l.Index("c")) +} + +func TestList_Len(t *testing.T) { + a := T{} + b := T{} + l := List[T]{a, b} + + assert.Equal(t, 0, List[T](nil).Len()) + assert.Equal(t, 2, l.Len()) +} + +func TestList_Copy(t *testing.T) { + a := &T{} + b := &T{} + l := List[*T]{a, b} + + assert.Nil(t, List[*T](nil).Copy()) + assert.Equal(t, List[*T]{a, b}, l.Copy()) + assert.NotSame(t, l, l.Copy()) + assert.Same(t, a, l.Copy()[0]) + assert.Same(t, b, l.Copy()[1]) +} + +func TestList_Ref(t *testing.T) { + a := T{} + b := T{} + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).Ref()) + assert.Equal(t, &List[T]{a, b}, l.Ref()) +} + +func TestList_Refs(t *testing.T) { + a := T{} + b := T{} + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).Refs()) + assert.Equal(t, []*T{&a, &b}, l.Refs()) +} + +func TestList_Delete(t *testing.T) { + l := List[string]{"a", "b", "c"} + + assert.Nil(t, (List[string])(nil).Delete("a")) + assert.Equal(t, List[string]{"a", "c"}, l.Delete("b")) + 
assert.Equal(t, List[string]{"a", "b", "c"}, l) +} + +func TestList_DeleteAt(t *testing.T) { + l := List[string]{"a", "b", "c"} + + assert.Nil(t, (List[string])(nil).DeleteAt(1)) + assert.Equal(t, List[string]{"a", "c"}, l.DeleteAt(1)) + assert.Equal(t, List[string]{"a", "b", "c"}, l) +} + +func TestList_Add(t *testing.T) { + l := List[string]{"a", "b"} + + assert.Equal(t, List[string]{"a"}, (List[string])(nil).Add("a")) + assert.Equal(t, List[string]{"a", "b", "c", "d"}, l.Add("c", "d")) + assert.Equal(t, List[string]{"a", "b"}, l) +} + +func TestList_AddUniq(t *testing.T) { + l := List[string]{"a", "b"} + + assert.Equal(t, List[string]{"a"}, (List[string])(nil).AddUniq("a")) + assert.Equal(t, List[string]{"a", "b", "c"}, l.AddUniq("a", "c")) + assert.Equal(t, List[string]{"a", "b"}, l) +} + +func TestList_Insert(t *testing.T) { + a := T{} + b := T{} + c := T{} + l := List[T]{a, b} + + assert.Equal(t, List[T]{a, b, c}, l.Insert(-1, c)) + assert.Equal(t, List[T]{c, a, b}, l.Insert(0, c)) + assert.Equal(t, List[T]{a, c, b}, l.Insert(1, c)) + assert.Equal(t, List[T]{a, b, c}, l.Insert(2, c)) + assert.Equal(t, List[T]{a, b, c}, l.Insert(3, c)) + assert.Equal(t, List[T]{a, b}, l) +} + +func TestList_Move(t *testing.T) { + l := List[string]{"a", "b", "c"} + + assert.Nil(t, (List[string])(nil).Move("a", -1)) + assert.Equal(t, List[string]{"b", "c"}, l.Move("a", -1)) + assert.Equal(t, List[string]{"a", "b", "c"}, l) + assert.Equal(t, List[string]{"c", "a", "b"}, l.Move("c", 0)) + assert.Equal(t, List[string]{"a", "c", "b"}, l.Move("b", 10)) +} + +func TestList_MoveAt(t *testing.T) { + l := List[string]{"a", "b", "c"} + + assert.Nil(t, (List[string])(nil).MoveAt(0, -1)) + assert.Equal(t, List[string]{"b", "c"}, l.MoveAt(0, -1)) + assert.Equal(t, List[string]{"a", "b", "c"}, l) + assert.Equal(t, List[string]{"c", "a", "b"}, l.MoveAt(2, 0)) + assert.Equal(t, List[string]{"a", "c", "b"}, l.MoveAt(1, 10)) +} + +func TestList_Reverse(t *testing.T) { + l := List[string]{"a", 
"b", "c"} + + assert.Nil(t, (List[string])(nil).Reverse()) + assert.Equal(t, List[string]{"c", "b", "a"}, l.Reverse()) + assert.Equal(t, List[string]{"a", "b", "c"}, l) +} + +func TestList_Concat(t *testing.T) { + l := List[string]{"a", "b", "c"} + + assert.Equal(t, List[string]{"a"}, (List[string])(nil).Concat(List[string]{"a"})) + assert.Equal(t, List[string]{"a", "b", "c", "d", "e"}, l.Concat(List[string]{"d", "e"})) + assert.Equal(t, List[string]{"a", "b", "c"}, l) +} + +func TestList_Intersect(t *testing.T) { + l := List[string]{"a", "b", "c"} + + assert.Nil(t, (List[string])(nil).Intersect(List[string]{"a"})) + assert.Equal(t, List[string]{"a", "b"}, l.Intersect(List[string]{"b", "e", "a"})) + assert.Equal(t, List[string]{"a", "b", "c"}, l) +} diff --git a/server/pkg/util/map.go b/server/pkg/util/map.go new file mode 100644 index 000000000..21cd1322a --- /dev/null +++ b/server/pkg/util/map.go @@ -0,0 +1,157 @@ +package util + +import "sync" + +type SyncMap[K, V any] struct { + m sync.Map +} + +func (m *SyncMap[K, V]) Load(key K) (vv V, _ bool) { + v, ok := m.m.Load(key) + if ok { + vv = v.(V) + } + return vv, ok +} + +func (m *SyncMap[K, V]) LoadAll(keys ...K) (r []V) { + for _, k := range keys { + v, ok := m.Load(k) + if ok { + r = append(r, v) + } + } + return r +} + +func (m *SyncMap[K, V]) Store(key K, value V) { + m.m.Store(key, value) +} + +func (m *SyncMap[K, V]) LoadOrStore(key K, value V) (vv V, _ bool) { + v, ok := m.m.LoadOrStore(key, value) + if ok { + vv = v.(V) + } + return vv, ok +} + +func (m *SyncMap[K, V]) LoadAndDelete(key K) (vv V, ok bool) { + v, ok := m.m.LoadAndDelete(key) + if ok { + vv = v.(V) + } + return vv, ok +} + +func (m *SyncMap[K, V]) Delete(key K) { + m.m.Delete(key) +} + +func (m *SyncMap[K, V]) DeleteAll(key ...K) { + for _, k := range key { + m.Delete(k) + } +} + +func (m *SyncMap[K, V]) Range(f func(key K, value V) bool) { + m.m.Range(func(key, value any) bool { + return f(key.(K), value.(V)) + }) +} + +func (m 
*SyncMap[K, V]) Find(f func(key K, value V) bool) (v V) { + m.Range(func(key K, value V) bool { + if f(key, value) { + v = value + return false + } + return true + }) + return +} + +func (m *SyncMap[K, V]) FindAll(f func(key K, value V) bool) (v []V) { + m.Range(func(key K, value V) bool { + if f(key, value) { + v = append(v, value) + } + return true + }) + return +} + +func (m *SyncMap[K, V]) Clone() *SyncMap[K, V] { + if m == nil { + return nil + } + n := &SyncMap[K, V]{} + m.Range(func(key K, value V) bool { + n.Store(key, value) + return true + }) + return n +} + +func (m *SyncMap[K, V]) Map(f func(K, V) V) *SyncMap[K, V] { + n := m.Clone() + n.Range(func(key K, value V) bool { + n.Store(key, f(key, value)) + return true + }) + return n +} + +func (m *SyncMap[K, V]) Merge(n *SyncMap[K, V]) { + n.Range(func(key K, value V) bool { + m.Store(key, value) + return true + }) +} + +func (m *SyncMap[K, V]) Keys() (l []K) { + m.Range(func(key K, _ V) bool { + l = append(l, key) + return true + }) + return l +} + +func (m *SyncMap[K, V]) Values() (l []V) { + m.Range(func(_ K, value V) bool { + l = append(l, value) + return true + }) + return l +} + +func (m *SyncMap[K, V]) Len() (i int) { + m.m.Range(func(_ any, _ any) bool { + i++ + return true + }) + return +} + +type LockMap[T any] struct { + m SyncMap[T, *sync.Mutex] +} + +func (m *LockMap[T]) Lock(k T) func() { + nl := &sync.Mutex{} + l, ok := m.m.LoadOrStore(k, nl) + if ok { + l.Lock() + } else { + nl.Lock() + } + return func() { + m.Unlock(k) + } +} + +func (m *LockMap[T]) Unlock(k T) { + if l, ok := m.m.LoadAndDelete(k); ok { + l.Unlock() + } +} diff --git a/server/pkg/util/map_test.go b/server/pkg/util/map_test.go new file mode 100644 index 000000000..cd71d24dc --- /dev/null +++ b/server/pkg/util/map_test.go @@ -0,0 +1,211 @@ +package util + +import ( + "sync" + "testing" + + "github.com/stretchr/testify/assert" + "golang.org/x/exp/slices" +) + +func TestSyncMap_Load_Store(t *testing.T) { + s := &SyncMap[string, 
int]{} + s.Store("a", 1) + + res, ok := s.Load("a") + assert.Equal(t, 1, res) + assert.True(t, ok) + + res, ok = s.Load("b") + assert.Equal(t, 0, res) + assert.False(t, ok) +} + +func TestSyncMap_LoadAll(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + + assert.Equal(t, []int{1, 2}, s.LoadAll("a", "b", "c")) + assert.Equal(t, []int(nil), s.LoadAll("d")) +} + +func TestSyncMap_LoadOrStore(t *testing.T) { + s := &SyncMap[string, string]{} + res, ok := s.LoadOrStore("a", "A") + assert.Equal(t, "", res) + assert.False(t, ok) + res, ok = s.LoadOrStore("a", "AA") + assert.Equal(t, "A", res) + assert.True(t, ok) + res, ok = s.Load("a") + assert.Equal(t, "A", res) + assert.True(t, ok) +} + +func TestSyncMap_Delete(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + + s.Delete("a") + res, ok := s.Load("a") + assert.Equal(t, 0, res) + assert.False(t, ok) + + s.Delete("b") // no panic +} + +func TestSyncMap_DeleteAll(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + + s.DeleteAll("a", "b") + res, ok := s.Load("a") + assert.Equal(t, 0, res) + assert.False(t, ok) + res, ok = s.Load("b") + assert.Equal(t, 0, res) + assert.False(t, ok) + + s.DeleteAll("c") // no panic +} + +func TestSyncMap_Range(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + + var vv int + s.Range(func(k string, v int) bool { + if k == "a" { + vv = v + return false + } + return true + }) + assert.Equal(t, 1, vv) +} + +func TestSyncMap_Find(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + + res := s.Find(func(k string, v int) bool { + return k == "a" + }) + assert.Equal(t, 1, res) + + res = s.Find(func(k string, v int) bool { + return k == "c" + }) + assert.Equal(t, 0, res) +} + +func TestSyncMap_FindAll(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + + res := s.FindAll(func(k string, v int) bool { + return k == "a" || 
k == "b" + }) + slices.Sort(res) + assert.Equal(t, []int{1, 2}, res) + + res = s.FindAll(func(k string, v int) bool { + return k == "c" + }) + assert.Equal(t, []int(nil), res) +} + +func TestSyncMap_Map(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + u := s.Map(func(k string, v int) int { + if k == "a" { + return 3 + } + return v + }) + + keys := u.Keys() + slices.Sort(keys) + values := u.Values() + slices.Sort(values) + assert.Equal(t, []string{"a", "b"}, keys) + assert.Equal(t, []int{2, 3}, values) +} + +func TestSyncMap_Merge(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + u := &SyncMap[string, int]{} + u.Store("c", 3) + s.Merge(u) + + keys := s.Keys() + slices.Sort(keys) + values := s.Values() + slices.Sort(values) + assert.Equal(t, []string{"a", "b", "c"}, keys) + assert.Equal(t, []int{1, 2, 3}, values) +} + +func TestSyncMap_Keys(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + keys := s.Keys() + slices.Sort(keys) + assert.Equal(t, []string{"a", "b"}, keys) +} + +func TestSyncMap_Values(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + values := s.Values() + slices.Sort(values) + assert.Equal(t, []int{1, 2}, values) +} + +func TestSyncMap_Len(t *testing.T) { + s := SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + assert.Equal(t, 2, s.Len()) +} + +func TestLockMap(t *testing.T) { + m := LockMap[string]{} + res := []string{} + wg := sync.WaitGroup{} + + wg.Add(3) + go func() { + u := m.Lock("a") + res = append(res, "a") + u() + wg.Done() + }() + go func() { + u := m.Lock("b") + res = append(res, "b") + u() + wg.Done() + }() + go func() { + u := m.Lock("a") + res = append(res, "c") + u() + wg.Done() + }() + + wg.Wait() + slices.Sort(res) + assert.Equal(t, []string{"a", "b", "c"}, res) +} diff --git a/server/pkg/util/now.go b/server/pkg/util/now.go new file mode 100644 index 000000000..b4904054b --- /dev/null 
+++ b/server/pkg/util/now.go @@ -0,0 +1,10 @@ +package util + +import "time" + +var Now = time.Now + +func MockNow(t time.Time) func() { + Now = func() time.Time { return t } + return func() { Now = time.Now } +} diff --git a/server/pkg/util/slice.go b/server/pkg/util/slice.go new file mode 100644 index 000000000..be65b65ac --- /dev/null +++ b/server/pkg/util/slice.go @@ -0,0 +1,135 @@ +package util + +import "github.com/samber/lo" + +type Element[T any] struct { + Index int + Element T +} + +// Enumerate returns a new slice with each element and its index. +func Enumerate[T any](collection []T) []Element[T] { + if collection == nil { + return nil + } + + return lo.Map(collection, func(e T, i int) Element[T] { + return Element[T]{ + Index: i, + Element: e, + } + }) +} + +// Map is similar to lo.Map, but accepts an iteratee without the index argument. +func Map[T any, V any](collection []T, iteratee func(v T) V) []V { + if collection == nil { + return nil + } + + return lo.Map(collection, func(v T, _ int) V { + return iteratee(v) + }) +} + +// TryMap is similar to Map, but when an error occurs in the iteratee, it terminates the iteration and returns an error. +func TryMap[T any, V any](collection []T, iteratee func(v T) (V, error)) ([]V, error) { + if collection == nil { + return nil, nil + } + + m := make([]V, 0, len(collection)) + for _, e := range collection { + j, err := iteratee(e) + if err != nil { + return nil, err + } + m = append(m, j) + } + return m, nil +} + +// FilterMap is similar to Map, but if the iteratee returns nil, that element will be omitted from the new slice. 
+func FilterMap[T any, V any](collection []T, iteratee func(v T) *V) []V { + if collection == nil { + return nil + } + + m := make([]V, 0, len(collection)) + for _, e := range collection { + if j := iteratee(e); j != nil { + m = append(m, *j) + } + } + return m +} + +// FilterMapOk is similar to FilterMap, but the iteratee can return a boolean as the second return value, +// and it is false, that element will be omitted from the new slice. +func FilterMapOk[T any, V any](collection []T, iteratee func(v T) (V, bool)) []V { + if collection == nil { + return nil + } + + m := make([]V, 0, len(collection)) + for _, e := range collection { + if j, ok := iteratee(e); ok { + m = append(m, j) + } + } + return m +} + +// FilterMapR is similar to FilterMap, but if the return value of the iteratee is not nil, +// it is not dereferenced and is used as the value of the new element. +func FilterMapR[T any, V any](collection []T, iteratee func(v T) *V) []*V { + if collection == nil { + return nil + } + + m := make([]*V, 0, len(collection)) + for _, e := range collection { + if j := iteratee(e); j != nil { + m = append(m, j) + } + } + return m +} + +// https://github.com/samber/lo/issues/54 +func All[T any](collection []T, predicate func(T) bool) bool { + for _, e := range collection { + if !predicate(e) { + return false + } + } + return true +} + +// https://github.com/samber/lo/issues/54 +func Any[T any](collection []T, predicate func(T) bool) bool { + for _, e := range collection { + if predicate(e) { + return true + } + } + return false +} + +// Filter is similar to lo.Filter, but accepts an iteratee without the index argument. +func Filter[T any](collection []T, iteratee func(v T) bool) []T { + if collection == nil { + return nil + } + + return lo.Filter(collection, func(v T, _ int) bool { + return iteratee(v) + }) +} + +// DerefSlice drops nil elements in the slice and return a new slice with dereferenced elements. 
+func DerefSlice[T any](collection []*T) []T { + return FilterMap(collection, func(e *T) *T { + return e + }) +} diff --git a/server/pkg/util/slice_test.go b/server/pkg/util/slice_test.go new file mode 100644 index 000000000..051ac8932 --- /dev/null +++ b/server/pkg/util/slice_test.go @@ -0,0 +1,95 @@ +package util + +import ( + "errors" + "testing" + + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestEnumerate(t *testing.T) { + assert.Nil(t, Enumerate[int](nil)) + assert.Equal(t, []Element[int]{ + {Index: 0, Element: 3}, + {Index: 1, Element: 2}, + {Index: 2, Element: 1}, + }, Enumerate([]int{3, 2, 1})) +} + +func TestMap(t *testing.T) { + assert.Nil(t, Map[int, bool](nil, nil)) + assert.Equal(t, []bool{true, false, true}, Map([]int{1, 0, 2}, func(i int) bool { return i != 0 })) +} + +func TestTryMap(t *testing.T) { + res, err := TryMap[int, bool](nil, nil) + assert.Nil(t, res) + assert.NoError(t, err) + + iteratee := func(i int) (bool, error) { + if i == 0 { + return false, errors.New("aaa") + } + return true, nil + } + res, err = TryMap([]int{1, 2, 3}, iteratee) + assert.Equal(t, []bool{true, true, true}, res) + assert.NoError(t, err) + + res, err = TryMap([]int{1, 0, 3}, iteratee) + assert.Nil(t, res) + assert.Equal(t, errors.New("aaa"), err) +} + +func TestFilterMap(t *testing.T) { + assert.Nil(t, FilterMap[int, bool](nil, nil)) + assert.Equal(t, []bool{true, false}, FilterMap([]int{1, 0, 2}, func(i int) *bool { + if i == 0 { + return nil + } + return lo.ToPtr(i == 1) + })) +} + +func TestFilterMapOk(t *testing.T) { + assert.Nil(t, FilterMapOk[int, bool](nil, nil)) + assert.Equal(t, []bool{true, false}, FilterMapOk([]int{1, 0, 2}, func(i int) (bool, bool) { + if i == 0 { + return false, false + } + return i == 1, true + })) +} + +func TestFilterR(t *testing.T) { + assert.Nil(t, FilterMapR[int, bool](nil, nil)) + assert.Equal(t, []*bool{lo.ToPtr(true), lo.ToPtr(false)}, FilterMapR([]int{1, 0, 2}, func(i int) *bool { + if i == 0 { + 
return nil + } + return lo.ToPtr(i == 1) + })) +} + +func TestAll(t *testing.T) { + assert.True(t, All([]int{1, 2, 3}, func(i int) bool { return i < 4 })) + assert.False(t, All([]int{1, 2, 3}, func(i int) bool { return i < 3 })) +} + +func TestAny(t *testing.T) { + assert.True(t, Any([]int{1, 2, 3}, func(i int) bool { return i == 1 })) + assert.False(t, Any([]int{1, 2, 3}, func(i int) bool { return i == 4 })) +} + +func TestFilter(t *testing.T) { + assert.Nil(t, Filter[int](nil, nil)) + assert.Equal(t, []int{1, 2}, Filter([]int{1, 0, 2}, func(i int) bool { + return i != 0 + })) +} + +func TestDerefSlice(t *testing.T) { + assert.Nil(t, DerefSlice[int](nil)) + assert.Equal(t, []int{1, 0, 2}, DerefSlice([]*int{lo.ToPtr(1), nil, lo.ToPtr(0), lo.ToPtr(2)})) +} diff --git a/server/pkg/util/util.go b/server/pkg/util/util.go new file mode 100644 index 000000000..6c4b1383c --- /dev/null +++ b/server/pkg/util/util.go @@ -0,0 +1,25 @@ +package util + +func Must[T any](v T, err error) T { + if err != nil { + panic(err) + } + return v +} + +func IsZero[T comparable](v T) bool { + var z T + return v == z +} + +func IsNotZero[T comparable](v T) bool { + return !IsZero(v) +} + +func Deref[T any](r *T) T { + if r == nil { + var z T + return z + } + return *r +} diff --git a/server/pkg/util/util_test.go b/server/pkg/util/util_test.go new file mode 100644 index 000000000..c017afb43 --- /dev/null +++ b/server/pkg/util/util_test.go @@ -0,0 +1,52 @@ +package util + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestMust(t *testing.T) { + a := &struct{}{} + err := errors.New("ERR") + assert.Same(t, a, Must(a, nil)) + assert.PanicsWithValue(t, err, func() { + _ = Must(a, err) + }) +} + +func TestIsZero(t *testing.T) { + assert.True(t, IsZero(0)) + assert.False(t, IsZero(-1)) + assert.True(t, IsZero(struct { + A int + B string + }{})) + assert.False(t, IsZero(struct { + A int + B string + }{A: 1})) + assert.True(t, IsZero((*(struct{}))(nil))) + 
assert.False(t, IsZero((*(struct{}))(&struct{}{}))) +} + +func TestIsNotZero(t *testing.T) { + assert.False(t, IsNotZero(0)) + assert.True(t, IsNotZero(-1)) + assert.False(t, IsNotZero(struct { + A int + B string + }{})) + assert.True(t, IsNotZero(struct { + A int + B string + }{A: 1})) + assert.False(t, IsNotZero((*(struct{}))(nil))) + assert.True(t, IsNotZero((*(struct{}))(&struct{}{}))) +} + +func TestDeref(t *testing.T) { + assert.Equal(t, struct{ A int }{}, Deref((*(struct{ A int }))(nil))) + assert.Equal(t, struct{ A int }{A: 1}, Deref((*(struct{ A int }))(&struct{ A int }{A: 1}))) +} diff --git a/server/pkg/value/bool.go b/server/pkg/value/bool.go new file mode 100644 index 000000000..68f172f32 --- /dev/null +++ b/server/pkg/value/bool.go @@ -0,0 +1,44 @@ +package value + +import "strconv" + +var TypeBool Type = "bool" + +type propertyBool struct{} + +func (p *propertyBool) I2V(i interface{}) (interface{}, bool) { + switch v := i.(type) { + case bool: + return v, true + case string: + if b, err := strconv.ParseBool(v); err == nil { + return b, true + } + case *bool: + if v != nil { + return p.I2V(*v) + } + case *string: + if v != nil { + return p.I2V(*v) + } + } + return nil, false +} + +func (*propertyBool) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*propertyBool) Validate(i interface{}) bool { + _, ok := i.(bool) + return ok +} + +func (v *Value) ValueBool() (vv bool, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(bool) + return +} diff --git a/server/pkg/value/bool_test.go b/server/pkg/value/bool_test.go new file mode 100644 index 000000000..3fa0fa52e --- /dev/null +++ b/server/pkg/value/bool_test.go @@ -0,0 +1,64 @@ +package value + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_propertyBool_I2V(t *testing.T) { + tr := true + fa := false + trs1 := "true" + trs2 := "TRUE" + trs3 := "True" + trs4 := "T" + trs5 := "t" + trs6 := "1" + fas1 := "false" + fas2 := "FALSE" + fas3 := "False" + 
fas4 := "F" + fas5 := "f" + fas6 := "0" + + tests := []struct { + name string + args []interface{} + want1 interface{} + want2 bool + }{ + { + name: "true", + args: []interface{}{tr, trs1, trs2, trs3, trs4, trs5, trs6, &tr, &trs1, &trs2, &trs3, &trs4, &trs5, &trs6}, + want1: true, + want2: true, + }, + { + name: "false", + args: []interface{}{fa, fas1, fas2, fas3, fas4, fas5, fas6, &fa, &fas1, &fas2, &fas3, &fas4, &fas5, &fas6}, + want1: false, + want2: true, + }, + { + name: "nil", + args: []interface{}{"foo", (*bool)(nil), (*string)(nil), nil}, + want1: nil, + want2: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + p := &propertyBool{} + for i, v := range tt.args { + got1, got2 := p.I2V(v) + assert.Equal(t, tt.want1, got1, "test %d", i) + assert.Equal(t, tt.want2, got2, "test %d", i) + } + }) + } +} diff --git a/server/pkg/value/coordinates.go b/server/pkg/value/coordinates.go new file mode 100644 index 000000000..06ade2678 --- /dev/null +++ b/server/pkg/value/coordinates.go @@ -0,0 +1,78 @@ +package value + +import "github.com/mitchellh/mapstructure" + +type Coordinates []LatLngHeight + +// CoordinatesFrom generates a new Coordinates from slice such as [lon, lat, alt, lon, lat, alt, ...] 
+func CoordinatesFrom(coords []float64) Coordinates { + if len(coords) == 0 { + return nil + } + + r := make([]LatLngHeight, 0, len(coords)/3) + l := LatLngHeight{} + for i, c := range coords { + switch i % 3 { + case 0: + l = LatLngHeight{} + l.Lng = c + case 1: + l.Lat = c + case 2: + l.Height = c + r = append(r, l) + } + } + + return r +} + +var TypeCoordinates Type = "coordinates" + +type propertyCoordinates struct{} + +func (p *propertyCoordinates) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(Coordinates); ok { + return v, true + } else if v, ok := i.(*Coordinates); ok { + if v != nil { + return p.I2V(*v) + } + return nil, false + } else if v2, ok := i.([]float64); ok { + if v2 == nil { + return nil, false + } + return CoordinatesFrom(v2), true + } + + v2 := Coordinates{} + if err := mapstructure.Decode(i, &v2); err == nil { + return v2, true + } + + v1 := []float64{} + if err := mapstructure.Decode(i, &v1); err == nil { + return CoordinatesFrom(v1), true + } + + return nil, false +} + +func (*propertyCoordinates) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*propertyCoordinates) Validate(i interface{}) bool { + _, ok := i.(Coordinates) + return ok +} + +func (v *Value) ValueCoordinates() (vv Coordinates, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(Coordinates) + return +} diff --git a/server/pkg/value/latlng.go b/server/pkg/value/latlng.go new file mode 100644 index 000000000..f824df3fd --- /dev/null +++ b/server/pkg/value/latlng.go @@ -0,0 +1,62 @@ +package value + +import "github.com/mitchellh/mapstructure" + +type LatLng struct { + Lat float64 `json:"lat" mapstructure:"lat"` + Lng float64 `json:"lng" mapstructure:"lng"` +} + +func (l *LatLng) Clone() *LatLng { + if l == nil { + return nil + } + return &LatLng{ + Lat: l.Lat, + Lng: l.Lng, + } +} + +var TypeLatLng Type = "latlng" + +type propertyLatLng struct{} + +func (p *propertyLatLng) I2V(i interface{}) (interface{}, bool) { + switch v := i.(type) { + case 
LatLng: + return v, true + case LatLngHeight: + return LatLng{Lat: v.Lat, Lng: v.Lng}, true + case *LatLng: + if v != nil { + return p.I2V(*v) + } + case *LatLngHeight: + if v != nil { + return p.I2V(*v) + } + } + + v := LatLng{} + if err := mapstructure.Decode(i, &v); err != nil { + return nil, false + } + return v, true +} + +func (*propertyLatLng) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*propertyLatLng) Validate(i interface{}) bool { + _, ok := i.(LatLng) + return ok +} + +func (v *Value) ValueLatLng() (vv LatLng, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(LatLng) + return +} diff --git a/server/pkg/value/latlng_test.go b/server/pkg/value/latlng_test.go new file mode 100644 index 000000000..f749c88bd --- /dev/null +++ b/server/pkg/value/latlng_test.go @@ -0,0 +1,41 @@ +package value + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestLatLng_Clone(t *testing.T) { + tests := []struct { + Name string + LL, Expected *LatLng + }{ + { + Name: "nil latlng", + }, + { + Name: "cloned", + LL: &LatLng{ + Lat: 10, + Lng: 11, + }, + Expected: &LatLng{ + Lat: 10, + Lng: 11, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.LL.Clone() + assert.Equal(t, tc.Expected, res) + if tc.Expected != nil { + assert.NotSame(t, tc.Expected, res) + } + }) + } +} diff --git a/server/pkg/value/latlngheight.go b/server/pkg/value/latlngheight.go new file mode 100644 index 000000000..9dead9880 --- /dev/null +++ b/server/pkg/value/latlngheight.go @@ -0,0 +1,64 @@ +package value + +import "github.com/mitchellh/mapstructure" + +type LatLngHeight struct { + Lat float64 `json:"lat" mapstructure:"lat"` + Lng float64 `json:"lng" mapstructure:"lng"` + Height float64 `json:"height" mapstructure:"height"` +} + +func (l *LatLngHeight) Clone() *LatLngHeight { + if l == nil { + return nil + } + return &LatLngHeight{ + Lat: l.Lat, + Lng: l.Lng, + Height: l.Height, + } 
+} + +var TypeLatLngHeight Type = "latlngheight" + +type propertyLatLngHeight struct{} + +func (p *propertyLatLngHeight) I2V(i interface{}) (interface{}, bool) { + switch v := i.(type) { + case LatLngHeight: + return v, true + case LatLng: + return LatLngHeight{Lat: v.Lat, Lng: v.Lng, Height: 0}, true + case *LatLngHeight: + if v != nil { + return p.I2V(*v) + } + case *LatLng: + if v != nil { + return p.I2V(*v) + } + } + + v := LatLngHeight{} + if err := mapstructure.Decode(i, &v); err == nil { + return v, true + } + return nil, false +} + +func (*propertyLatLngHeight) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*propertyLatLngHeight) Validate(i interface{}) bool { + _, ok := i.(LatLngHeight) + return ok +} + +func (v *Value) ValueLatLngHeight() (vv LatLngHeight, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(LatLngHeight) + return +} diff --git a/server/pkg/value/latlngheight_test.go b/server/pkg/value/latlngheight_test.go new file mode 100644 index 000000000..2670900de --- /dev/null +++ b/server/pkg/value/latlngheight_test.go @@ -0,0 +1,43 @@ +package value + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestLatLngHeight_Clone(t *testing.T) { + tests := []struct { + Name string + LL, Expected *LatLngHeight + }{ + { + Name: "nil LatLngHeight", + }, + { + Name: "cloned", + LL: &LatLngHeight{ + Lat: 10, + Lng: 11, + Height: 12, + }, + Expected: &LatLngHeight{ + Lat: 10, + Lng: 11, + Height: 12, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.LL.Clone() + assert.Equal(t, tc.Expected, res) + if tc.Expected != nil { + assert.NotSame(t, tc.Expected, res) + } + }) + } +} diff --git a/server/pkg/value/number.go b/server/pkg/value/number.go new file mode 100644 index 000000000..5ac456966 --- /dev/null +++ b/server/pkg/value/number.go @@ -0,0 +1,137 @@ +package value + +import ( + "encoding/json" + "strconv" +) + +var TypeNumber Type = 
"number" + +type propertyNumber struct{} + +func (p *propertyNumber) I2V(i interface{}) (interface{}, bool) { + switch v := i.(type) { + case float64: + return v, true + case float32: + return float64(v), true + case int: + return float64(v), true + case int8: + return float64(v), true + case int16: + return float64(v), true + case int32: + return float64(v), true + case int64: + return float64(v), true + case uint: + return float64(v), true + case uint8: + return float64(v), true + case uint16: + return float64(v), true + case uint32: + return float64(v), true + case uint64: + return float64(v), true + case uintptr: + return float64(v), true + case json.Number: + if f, err := v.Float64(); err == nil { + return f, true + } + case string: + if vfloat64, err := strconv.ParseFloat(v, 64); err == nil { + return vfloat64, true + } + case bool: + if v { + return float64(1), true + } else { + return float64(0), true + } + case *float64: + if v != nil { + return p.I2V(*v) + } + case *float32: + if v != nil { + return p.I2V(*v) + } + case *int: + if v != nil { + return p.I2V(*v) + } + case *int8: + if v != nil { + return p.I2V(*v) + } + case *int16: + if v != nil { + return p.I2V(*v) + } + case *int32: + if v != nil { + return p.I2V(*v) + } + case *int64: + if v != nil { + return p.I2V(*v) + } + case *uint: + if v != nil { + return p.I2V(*v) + } + case *uint8: + if v != nil { + return p.I2V(*v) + } + case *uint16: + if v != nil { + return p.I2V(*v) + } + case *uint32: + if v != nil { + return p.I2V(*v) + } + case *uint64: + if v != nil { + return p.I2V(*v) + } + case *uintptr: + if v != nil { + return p.I2V(*v) + } + case *json.Number: + if v != nil { + return p.I2V(*v) + } + case *string: + if v != nil { + return p.I2V(*v) + } + case *bool: + if v != nil { + return p.I2V(*v) + } + } + return nil, false +} + +func (*propertyNumber) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*propertyNumber) Validate(i interface{}) bool { + _, ok := i.(float64) + 
return ok +} + +func (v *Value) ValueNumber() (vv float64, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(float64) + return +} diff --git a/server/pkg/value/number_test.go b/server/pkg/value/number_test.go new file mode 100644 index 000000000..06c38a87d --- /dev/null +++ b/server/pkg/value/number_test.go @@ -0,0 +1,101 @@ +package value + +import ( + "encoding/json" + "math" + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_propertyNumber_I2V(t *testing.T) { + z1 := 0 + z2 := 0.0 + z3 := "0" + z4 := json.Number("0") + z5 := json.Number("-0") + n1 := 1.12 + n2 := "1.12" + n3 := json.Number("1.12") + nn1 := -0.11 + nn2 := "-0.11" + nn3 := json.Number("-0.11") + nan1 := math.NaN() + nan2 := json.Number("NaN") + inf1 := math.Inf(0) + inf2 := json.Number("Infinity") + infn1 := math.Inf(-1) + infn2 := json.Number("-Infinity") + + tests := []struct { + name string + args []interface{} + want1 interface{} + want2 bool + }{ + { + name: "zero", + args: []interface{}{z1, z2, z3, z4, z5, &z1, &z2, &z3, &z4, &z5}, + want1: 0.0, + want2: true, + }, + { + name: "float", + args: []interface{}{n1, n2, n3, &n1, &n2, &n3}, + want1: 1.12, + want2: true, + }, + { + name: "negative float", + args: []interface{}{nn1, nn2, nn3, &nn1, &nn2, &nn3}, + want1: -0.11, + want2: true, + }, + { + name: "nan", + args: []interface{}{nan1, nan2}, + want1: math.NaN(), + want2: true, + }, + { + name: "inf", + args: []interface{}{inf1, inf2}, + want1: math.Inf(0), + want2: true, + }, + { + name: "negative inf", + args: []interface{}{infn1, infn2}, + want1: math.Inf(-1), + want2: true, + }, + { + name: "nil", + args: []interface{}{"foo", (*float64)(nil), (*string)(nil), (*int)(nil), (*json.Number)(nil), nil}, + want1: nil, + want2: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + p := &propertyNumber{} + for i, v := range tt.args { + got1, got2 := p.I2V(v) + if f, ok := tt.want1.(float64); ok { + if 
math.IsNaN(f) { + assert.True(t, math.IsNaN(tt.want1.(float64))) + } else { + assert.Equal(t, tt.want1, got1, "test %d", i) + } + } else { + assert.Equal(t, tt.want1, got1, "test %d", i) + } + assert.Equal(t, tt.want2, got2, "test %d", i) + } + }) + } +} diff --git a/server/pkg/value/optional.go b/server/pkg/value/optional.go new file mode 100644 index 000000000..8bf0b084a --- /dev/null +++ b/server/pkg/value/optional.go @@ -0,0 +1,74 @@ +package value + +type Optional struct { + t Type + v *Value +} + +func NewOptional(t Type, v *Value) *Optional { + if t == TypeUnknown || (v != nil && v.Type() != t) { + return nil + } + return &Optional{ + t: t, + v: v, + } +} + +func OptionalFrom(v *Value) *Optional { + if v.Type() == TypeUnknown { + return nil + } + return &Optional{ + t: v.Type(), + v: v, + } +} + +func (ov *Optional) Type() Type { + if ov == nil { + return TypeUnknown + } + return ov.t +} + +func (ov *Optional) Value() *Value { + if ov == nil || ov.t == TypeUnknown || ov.v == nil { + return nil + } + return ov.v.Clone() +} + +func (ov *Optional) TypeAndValue() (Type, *Value) { + return ov.Type(), ov.Value() +} + +func (ov *Optional) SetValue(v *Value) { + if ov == nil || ov.t == TypeUnknown || (v != nil && ov.t != v.Type()) { + return + } + ov.v = v.Clone() +} + +func (ov *Optional) Clone() *Optional { + if ov == nil { + return nil + } + return &Optional{ + t: ov.t, + v: ov.v.Clone(), + } +} + +// Cast tries to convert the value to the new type and generates a new Optional. 
+func (ov *Optional) Cast(t Type, p TypePropertyMap) *Optional { + if ov == nil || ov.t == TypeUnknown { + return nil + } + if ov.v == nil { + return NewOptional(t, nil) + } + + nv := ov.v.Cast(t, p) + return NewOptional(t, nv) +} diff --git a/server/pkg/value/optional_test.go b/server/pkg/value/optional_test.go new file mode 100644 index 000000000..b6a43e0bd --- /dev/null +++ b/server/pkg/value/optional_test.go @@ -0,0 +1,394 @@ +package value + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewOptional(t *testing.T) { + type args struct { + t Type + v *Value + } + + tests := []struct { + name string + args args + want *Optional + }{ + { + name: "default type", + args: args{ + t: TypeString, + v: TypeString.ValueFrom("foo", nil), + }, + want: &Optional{t: TypeString, v: TypeString.ValueFrom("foo", nil)}, + }, + { + name: "custom type", + args: args{ + t: Type("foo"), + v: &Value{t: Type("foo")}, + }, + want: &Optional{t: Type("foo"), v: &Value{t: Type("foo")}}, + }, + { + name: "nil value", + args: args{ + t: Type("foo"), + }, + want: &Optional{t: Type("foo"), v: nil}, + }, + { + name: "invalid value", + args: args{ + t: TypeNumber, + v: TypeString.ValueFrom("foo", nil), + }, + want: nil, + }, + { + name: "invalid type", + args: args{ + t: TypeUnknown, + v: TypeString.ValueFrom("foo", nil), + }, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, NewOptional(tt.args.t, tt.args.v)) + }) + } +} + +func TestOptionalFrom(t *testing.T) { + type args struct { + v *Value + } + + tests := []struct { + name string + args args + want *Optional + }{ + { + name: "default type", + args: args{ + v: TypeString.ValueFrom("foo", nil), + }, + want: &Optional{t: TypeString, v: TypeString.ValueFrom("foo", nil)}, + }, + { + name: "custom type", + args: args{ + v: &Value{t: Type("foo")}, + }, + want: &Optional{t: Type("foo"), v: &Value{t: Type("foo")}}, + }, + { + 
name: "invalid value", + args: args{ + v: &Value{v: "string"}, + }, + want: nil, + }, + { + name: "nil value", + args: args{}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, OptionalFrom(tt.args.v)) + }) + } +} + +func TestOptional_Type(t *testing.T) { + tests := []struct { + name string + value *Optional + want Type + }{ + { + name: "ok", + value: &Optional{t: Type("foo")}, + want: Type("foo"), + }, + { + name: "empty", + value: &Optional{}, + want: TypeUnknown, + }, + { + name: "nil", + value: nil, + want: TypeUnknown, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Type()) + }) + } +} + +func TestOptional_Value(t *testing.T) { + tests := []struct { + name string + value *Optional + want *Value + }{ + { + name: "ok", + value: &Optional{t: TypeString, v: &Value{t: TypeString, v: "foobar"}}, + want: &Value{t: TypeString, v: "foobar"}, + }, + { + name: "empty", + value: &Optional{}, + want: nil, + }, + { + name: "nil", + value: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.value.Value() + assert.Equal(t, tt.want, res) + if res != nil { + assert.NotSame(t, tt.want, res) + } + }) + } +} + +func TestOptional_TypeAndValue(t *testing.T) { + tests := []struct { + name string + value *Optional + wantt Type + wantv *Value + }{ + { + name: "ok", + value: &Optional{t: TypeString, v: &Value{t: TypeString, v: "foobar"}}, + wantt: TypeString, + wantv: &Value{t: TypeString, v: "foobar"}, + }, + { + name: "empty", + value: &Optional{}, + wantt: TypeUnknown, + wantv: nil, + }, + { + name: "nil", + value: nil, + wantt: TypeUnknown, + wantv: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ty, tv := tt.value.TypeAndValue() + assert.Equal(t, tt.wantt, 
ty) + assert.Equal(t, tt.wantv, tv) + if tv != nil { + assert.NotSame(t, tt.wantv, tv) + } + }) + } +} + +func TestOptional_SetValue(t *testing.T) { + type args struct { + v *Value + } + + tests := []struct { + name string + value *Optional + args args + invalid bool + }{ + { + name: "set", + value: &Optional{ + t: TypeString, + v: &Value{t: TypeString, v: "foobar"}, + }, + args: args{v: &Value{t: TypeString, v: "bar"}}, + }, + { + name: "set to nil", + value: &Optional{ + t: TypeString, + }, + args: args{v: &Value{t: TypeString, v: "bar"}}, + }, + { + name: "invalid value", + value: &Optional{ + t: TypeNumber, + v: &Value{t: TypeNumber, v: 1}, + }, + args: args{v: &Value{t: TypeString, v: "bar"}}, + invalid: true, + }, + { + name: "nil value", + value: &Optional{ + t: TypeNumber, + v: &Value{t: TypeNumber, v: 1}, + }, + }, + { + name: "empty", + value: &Optional{}, + args: args{v: &Value{t: TypeString, v: "bar"}}, + invalid: true, + }, + { + name: "nil", + args: args{v: &Value{t: TypeString, v: "bar"}}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + var v *Value + if tt.value != nil { + v = tt.value.v + } + + tt.value.SetValue(tt.args.v) + + if tt.value != nil { + if tt.invalid { + assert.Same(t, v, tt.value.v) + } else { + assert.Equal(t, tt.args.v, tt.value.v) + if tt.args.v != nil { + assert.NotSame(t, tt.args.v, tt.value.v) + } + } + } + }) + } +} + +func TestOptional_Clone(t *testing.T) { + tests := []struct { + name string + target *Optional + }{ + { + name: "ok", + target: &Optional{t: TypeString, v: TypeString.ValueFrom("foo", nil)}, + }, + { + name: "empty", + target: &Optional{}, + }, + { + name: "nil", + target: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.Clone() + assert.Equal(t, tt.target, res) + if tt.target != nil { + assert.NotSame(t, tt.target, res) + } + }) + } +} + +func TestOptional_Cast(t *testing.T) { 
+ type args struct { + t Type + p TypePropertyMap + } + + tests := []struct { + name string + target *Optional + args args + want *Optional + }{ + { + name: "diff type", + target: &Optional{t: TypeNumber, v: TypeNumber.ValueFrom(1.1, nil)}, + args: args{t: TypeString}, + want: &Optional{t: TypeString, v: TypeString.ValueFrom("1.1", nil)}, + }, + { + name: "same type", + target: &Optional{t: TypeNumber, v: TypeNumber.ValueFrom(1.1, nil)}, + args: args{t: TypeNumber}, + want: &Optional{t: TypeNumber, v: TypeNumber.ValueFrom(1.1, nil)}, + }, + { + name: "nil value", + target: &Optional{t: TypeNumber}, + args: args{t: TypeString}, + want: &Optional{t: TypeString}, + }, + { + name: "failed to cast", + target: &Optional{t: TypeLatLng, v: TypeLatLng.ValueFrom(LatLng{Lat: 1, Lng: 2}, nil)}, + args: args{t: TypeString}, + want: &Optional{t: TypeString}, + }, + { + name: "empty", + target: &Optional{}, + args: args{t: TypeString}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{t: TypeString}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Cast(tt.args.t, tt.args.p)) + }) + } +} diff --git a/server/pkg/value/polygon.go b/server/pkg/value/polygon.go new file mode 100644 index 000000000..2e1e7a0db --- /dev/null +++ b/server/pkg/value/polygon.go @@ -0,0 +1,59 @@ +package value + +import "github.com/mitchellh/mapstructure" + +var TypePolygon Type = "polygon" + +type Polygon []Coordinates + +func PolygonFrom(rings [][]float64) Polygon { + p := make([]Coordinates, 0, len(rings)) + for _, ring := range rings { + p = append(p, CoordinatesFrom(ring)) + } + return p +} + +type propertyPolygon struct{} + +func (p *propertyPolygon) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(Polygon); ok { + return v, true + } + + if v, ok := i.(*Polygon); ok { + if v != nil { + return p.I2V(*v) + } + return nil, false + } + + v := Polygon{} + if err := 
mapstructure.Decode(i, &v); err == nil { + return v, true + } + + v2 := [][]float64{} + if err := mapstructure.Decode(i, &v); err == nil { + return PolygonFrom(v2), true + } + + return nil, false +} + +func (*propertyPolygon) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*propertyPolygon) Validate(i interface{}) bool { + _, ok := i.(Polygon) + return ok +} + +func (v *Value) ValuePolygon() (vv Polygon, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(Polygon) + return +} diff --git a/server/pkg/value/rect.go b/server/pkg/value/rect.go new file mode 100644 index 000000000..33d5c92de --- /dev/null +++ b/server/pkg/value/rect.go @@ -0,0 +1,49 @@ +package value + +import "github.com/mitchellh/mapstructure" + +var TypeRect Type = "rect" + +type Rect struct { + West float64 `json:"west" mapstructure:"west"` + South float64 `json:"south" mapstructure:"south"` + East float64 `json:"east" mapstructure:"east"` + North float64 `json:"north" mapstructure:"north"` +} + +type propertyRect struct{} + +func (p *propertyRect) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(Rect); ok { + return v, true + } else if v, ok := i.(*Rect); ok { + if v != nil { + return p.I2V(*v) + } + return nil, false + } + + v := Rect{} + if err := mapstructure.Decode(i, &v); err == nil { + return v, false + } + + return nil, false +} + +func (*propertyRect) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*propertyRect) Validate(i interface{}) bool { + _, ok := i.(Rect) + return ok +} + +func (v *Value) ValueRect() (vv Rect, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(Rect) + return +} diff --git a/server/pkg/value/ref.go b/server/pkg/value/ref.go new file mode 100644 index 000000000..0fed13345 --- /dev/null +++ b/server/pkg/value/ref.go @@ -0,0 +1,40 @@ +package value + +import "fmt" + +var TypeRef Type = "ref" + +type propertyRef struct{} + +func (*propertyRef) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(string); 
type propertyRef struct{}

// I2V converts an arbitrary input into a ref value (a plain string).
// Accepted inputs: string, non-nil *string, fmt.Stringer, and non-nil
// *fmt.Stringer. Any other input (including nil pointers) is rejected.
func (*propertyRef) I2V(i interface{}) (interface{}, bool) {
	if v, ok := i.(string); ok {
		return v, true
	}
	// fixed: guard against a typed-nil *string, which was previously
	// dereferenced unconditionally and would panic.
	if v, ok := i.(*string); ok && v != nil {
		return *v, true
	}
	if v, ok := i.(fmt.Stringer); ok {
		return v.String(), true
	}
	if v, ok := i.(*fmt.Stringer); ok && v != nil {
		return (*v).String(), true
	}
	return nil, false
}
"github.com/stretchr/testify/assert" +) + +func Test_propertyString_I2V(t *testing.T) { + s := "foobar" + n := 1.12 + u, _ := url.Parse("https://reearth.io") + + tests := []struct { + name string + args []interface{} + want1 interface{} + want2 bool + }{ + { + name: "string", + args: []interface{}{s, &s}, + want1: "foobar", + want2: true, + }, + { + name: "number", + args: []interface{}{n, &n}, + want1: "1.12", + want2: true, + }, + { + name: "url", + args: []interface{}{u}, + want1: "https://reearth.io", + want2: true, + }, + { + name: "nil", + args: []interface{}{(*string)(nil), nil}, + want1: nil, + want2: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + p := &propertyString{} + for i, v := range tt.args { + got1, got2 := p.I2V(v) + assert.Equal(t, tt.want1, got1, "test %d", i) + assert.Equal(t, tt.want2, got2, "test %d", i) + } + }) + } +} diff --git a/server/pkg/value/type.go b/server/pkg/value/type.go new file mode 100644 index 000000000..6c0ee631f --- /dev/null +++ b/server/pkg/value/type.go @@ -0,0 +1,57 @@ +package value + +type Type string + +type TypeProperty interface { + I2V(interface{}) (interface{}, bool) + V2I(interface{}) (interface{}, bool) + Validate(interface{}) bool +} + +type TypePropertyMap = map[Type]TypeProperty + +var TypeUnknown = Type("") + +var defaultTypes = TypePropertyMap{ + TypeBool: &propertyBool{}, + TypeCoordinates: &propertyCoordinates{}, + TypeLatLng: &propertyLatLng{}, + TypeLatLngHeight: &propertyLatLngHeight{}, + TypeNumber: &propertyNumber{}, + TypePolygon: &propertyPolygon{}, + TypeRect: &propertyRect{}, + TypeRef: &propertyRef{}, + TypeString: &propertyString{}, + TypeURL: &propertyURL{}, +} + +func (t Type) Default() bool { + _, ok := defaultTypes[t] + return ok +} + +func (t Type) None() *Optional { + return NewOptional(t, nil) +} + +func (t Type) ValueFrom(i interface{}, p TypePropertyMap) *Value { + if t == TypeUnknown || i == nil { + return nil + } + + 
if p != nil { + if vt, ok := p[t]; ok && vt != nil { + if v, ok2 := vt.I2V(i); ok2 { + return &Value{p: p, v: v, t: t} + } + } + } + + if vt, ok := defaultTypes[t]; ok && vt != nil { + if v, ok2 := vt.I2V(i); ok2 { + return &Value{p: p, v: v, t: t} + } + } + + return nil +} diff --git a/server/pkg/value/type_test.go b/server/pkg/value/type_test.go new file mode 100644 index 000000000..90191df2f --- /dev/null +++ b/server/pkg/value/type_test.go @@ -0,0 +1,150 @@ +package value + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +type tpmock struct { + TypeProperty +} + +func (*tpmock) I2V(i interface{}) (interface{}, bool) { + return i.(string) + "a", true +} + +func (*tpmock) V2I(v interface{}) (interface{}, bool) { + return v.(string) + "bar", true +} + +func TestType_Default(t *testing.T) { + tests := []struct { + name string + tr Type + want bool + }{ + { + name: "default", + tr: TypeString, + want: true, + }, + { + name: "custom", + tr: Type("foo"), + want: false, + }, + { + name: "unknown", + tr: TypeUnknown, + want: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.tr.Default()) + }) + } +} + +func TestType_None(t *testing.T) { + tests := []struct { + name string + tr Type + want *Optional + }{ + { + name: "default", + tr: TypeString, + want: &Optional{t: TypeString}, + }, + { + name: "custom", + tr: Type("foo"), + want: &Optional{t: Type("foo")}, + }, + { + name: "unknown", + tr: TypeUnknown, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.tr.None()) + }) + } +} + +func TestType_ValueFrom(t *testing.T) { + tpm := TypePropertyMap{ + Type("foo"): &tpmock{}, + } + + type args struct { + i interface{} + p TypePropertyMap + } + + tests := []struct { + name string + tr Type + args args + want *Value + }{ + { + name: "default type", + tr: TypeString, + args: 
args{ + i: "hoge", + }, + want: &Value{t: TypeString, v: "hoge"}, + }, + { + name: "custom type", + tr: Type("foo"), + args: args{ + i: "hoge", + p: tpm, + }, + want: &Value{p: tpm, t: Type("foo"), v: "hogea"}, + }, + { + name: "nil", + tr: TypeString, + args: args{}, + want: nil, + }, + { + name: "unknown type", + tr: TypeUnknown, + args: args{ + i: "hoge", + }, + want: nil, + }, + { + name: "unknown type + custom type", + tr: Type("bar"), + args: args{ + i: "hoge", + p: tpm, + }, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.tr.ValueFrom(tt.args.i, tt.args.p)) + }) + } +} diff --git a/server/pkg/value/url.go b/server/pkg/value/url.go new file mode 100644 index 000000000..c64df75df --- /dev/null +++ b/server/pkg/value/url.go @@ -0,0 +1,53 @@ +package value + +import "net/url" + +var TypeURL Type = "url" + +type propertyURL struct{} + +func (p *propertyURL) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(url.URL); ok { + return &v, true + } + + if v, ok := i.(*url.URL); ok && v != nil { + return p.I2V(*v) // clone URL + } + + if v, ok := i.(string); ok { + if u, err := url.Parse(v); err == nil { + return u, true + } + } + + if v, ok := i.(*string); ok && v != nil { + return p.I2V(*v) + } + + return nil, false +} + +func (*propertyURL) V2I(v interface{}) (interface{}, bool) { + u, ok := v.(*url.URL) + if !ok { + return nil, false + } + if u == nil { + return "", true + } + return u.String(), true +} + +func (*propertyURL) Validate(i interface{}) bool { + _, ok := i.(*url.URL) + return ok +} + +func (v *Value) ValueURL() (vv *url.URL, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(*url.URL) + return +} diff --git a/server/pkg/value/value.go b/server/pkg/value/value.go new file mode 100644 index 000000000..ffaa79209 --- /dev/null +++ b/server/pkg/value/value.go @@ -0,0 +1,96 @@ +package value + +import ( + "encoding/json" +) + +type Value struct { + 
p TypePropertyMap + v interface{} + t Type +} + +func (v *Value) IsEmpty() bool { + return v == nil || v.t == TypeUnknown || v.v == nil +} + +func (v *Value) Clone() *Value { + if v.IsEmpty() { + return nil + } + return v.t.ValueFrom(v.v, v.p) +} + +func (v *Value) Some() *Optional { + return OptionalFrom(v) +} + +func (v *Value) Value() interface{} { + if v == nil { + return nil + } + return v.v +} + +func (v *Value) Type() Type { + if v == nil { + return TypeUnknown + } + return v.t +} + +func (v *Value) TypeProperty() (tp TypeProperty) { + if v.IsEmpty() { + return + } + if v.p != nil { + if tp, ok := v.p[v.t]; ok { + return tp + } + } + if tp, ok := defaultTypes[v.t]; ok { + return tp + } + return +} + +// Interface converts the value into generic representation +func (v *Value) Interface() interface{} { + if v == nil || v.t == TypeUnknown { + return nil + } + + if tp := v.TypeProperty(); tp != nil { + if i, ok2 := tp.V2I(v.v); ok2 { + return i + } + } + + return nil +} + +func (v *Value) Validate() bool { + if v == nil || v.t == TypeUnknown { + return false + } + + if tp := v.TypeProperty(); tp != nil { + return tp.Validate(v) + } + + return false +} + +func (v *Value) MarshalJSON() ([]byte, error) { + return json.Marshal(v.Interface()) +} + +func (v *Value) Cast(t Type, p TypePropertyMap) *Value { + if v == nil || v.t == TypeUnknown { + return nil + } + if v.t == t { + return v.Clone() + } + return t.ValueFrom(v.v, p) +} diff --git a/server/pkg/value/value_test.go b/server/pkg/value/value_test.go new file mode 100644 index 000000000..ebd7fdd98 --- /dev/null +++ b/server/pkg/value/value_test.go @@ -0,0 +1,388 @@ +package value + +import ( + "net/url" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestValue_IsEmpty(t *testing.T) { + tests := []struct { + name string + value *Value + want bool + }{ + { + name: "empty", + want: true, + }, + { + name: "nil", + want: true, + }, + { + name: "non-empty", + value: &Value{ + t: Type("hoge"), + v: 
"foo", + }, + want: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.IsEmpty()) + }) + } +} + +func TestValue_Clone(t *testing.T) { + tp := &tpmock{} + tpm := TypePropertyMap{ + Type("hoge"): tp, + } + + tests := []struct { + name string + value *Value + want *Value + }{ + { + name: "ok", + value: &Value{ + t: TypeString, + v: "foo", + }, + want: &Value{ + t: TypeString, + v: "foo", + }, + }, + { + name: "custom type property", + value: &Value{ + t: Type("hoge"), + v: "foo", + p: tpm, + }, + want: &Value{ + t: Type("hoge"), + v: "fooa", + p: tpm, + }, + }, + { + name: "nil", + value: nil, + want: nil, + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Clone()) + }) + } +} + +func TestValue_Some(t *testing.T) { + tp := &tpmock{} + tpm := TypePropertyMap{ + Type("hoge"): tp, + } + + tests := []struct { + name string + value *Value + want *Optional + }{ + { + name: "ok", + value: &Value{ + t: TypeString, + v: "foo", + }, + want: &Optional{ + t: TypeString, + v: &Value{ + t: TypeString, + v: "foo", + }, + }, + }, + { + name: "custom type property", + value: &Value{ + t: Type("hoge"), + v: "fooa", + p: tpm, + }, + want: &Optional{ + t: Type("hoge"), + v: &Value{ + t: Type("hoge"), + v: "fooa", + p: tpm, + }, + }, + }, + { + name: "nil", + value: nil, + want: nil, + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Some()) + }) + } +} + +func TestValue_Value(t *testing.T) { + u, _ := url.Parse("https://reearth.io") + + tests := []struct { + name string + value *Value + want interface{} + }{ + { + name: "ok", + value: &Value{t: TypeURL, v: u}, + want: u, + }, + { + name: "empty", + value: 
&Value{}, + }, + { + name: "nil", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.want == nil { + assert.Nil(t, tt.value.Value()) + } else { + assert.Same(t, tt.want, tt.value.Value()) + } + }) + } +} + +func TestValue_Type(t *testing.T) { + tests := []struct { + name string + value *Value + want Type + }{ + { + name: "ok", + value: &Value{t: TypeString}, + want: TypeString, + }, + { + name: "empty", + value: &Value{}, + want: TypeUnknown, + }, + { + name: "nil", + want: TypeUnknown, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Type()) + }) + } +} + +func TestValue_TypeProperty(t *testing.T) { + tp := &tpmock{} + tpm := TypePropertyMap{ + Type("hoge"): tp, + } + + tests := []struct { + name string + value *Value + want TypeProperty + }{ + { + name: "default type", + value: &Value{ + v: "string", + t: TypeString, + }, + want: defaultTypes[TypeString], + }, + { + name: "custom type", + value: &Value{ + v: "string", + t: Type("hoge"), + p: tpm, + }, + want: tp, + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + { + name: "nil", + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.value.TypeProperty() + if tt.want == nil { + assert.Nil(t, res) + } else { + assert.Same(t, tt.want, res) + } + }) + } +} + +func TestValue_Interface(t *testing.T) { + tp := &tpmock{} + tpm := TypePropertyMap{ + "foo": tp, + } + + tests := []struct { + name string + value *Value + want interface{} + }{ + { + name: "string", + value: &Value{t: TypeString, v: "hoge"}, + want: "hoge", + }, + { + name: "latlng", + value: &Value{t: TypeLatLng, v: LatLng{Lat: 1, Lng: 2}}, + want: LatLng{Lat: 1, Lng: 2}, + }, + { + name: "custom", + value: &Value{ + p: tpm, + t: Type("foo"), + v: "foo", + }, + want: "foobar", + }, + { + name: "empty", + value: &Value{}, + 
want: nil, + }, + { + name: "nil", + value: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Interface()) + }) + } +} + +func TestValue_Cast(t *testing.T) { + type args struct { + t Type + p TypePropertyMap + } + + tests := []struct { + name string + target *Value + args args + want *Value + }{ + { + name: "diff type", + target: &Value{t: TypeNumber, v: 1.1}, + args: args{t: TypeString}, + want: &Value{t: TypeString, v: "1.1"}, + }, + { + name: "same type", + target: &Value{t: TypeNumber, v: 1.1}, + args: args{t: TypeNumber}, + want: &Value{t: TypeNumber, v: 1.1}, + }, + { + name: "failed to cast", + target: &Value{t: TypeLatLng, v: LatLng{Lat: 1, Lng: 2}}, + args: args{t: TypeString}, + want: nil, + }, + { + name: "invalid value", + target: &Value{t: TypeNumber}, + args: args{t: TypeString}, + want: nil, + }, + { + name: "empty", + target: &Value{}, + args: args{t: TypeString}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{t: TypeString}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Cast(tt.args.t, tt.args.p)) + }) + } +} diff --git a/server/pkg/visualizer/visualizer.go b/server/pkg/visualizer/visualizer.go new file mode 100644 index 000000000..a9a49cf6c --- /dev/null +++ b/server/pkg/visualizer/visualizer.go @@ -0,0 +1,7 @@ +package visualizer + +type Visualizer string + +const ( + VisualizerCesium Visualizer = "cesium" +) diff --git a/server/pkg/writer/seeker_closer.go b/server/pkg/writer/seeker_closer.go new file mode 100644 index 000000000..c1ea7a72a --- /dev/null +++ b/server/pkg/writer/seeker_closer.go @@ -0,0 +1,55 @@ +package writer + +import ( + "errors" + "io" +) + +// reference: https://stackoverflow.com/questions/45836767/using-an-io-writeseeker-without-a-file-in-go +type WriterSeeker struct { + buffer []byte + position 
int +} + +func (sc *WriterSeeker) Write(p []byte) (int, error) { + minCap := sc.position + len(p) + if minCap > cap(sc.buffer) { + b2 := make([]byte, len(sc.buffer), minCap+len(p)) + copy(b2, sc.buffer) + sc.buffer = b2 + } + if minCap > len(sc.buffer) { + sc.buffer = sc.buffer[:minCap] + } + copy(sc.buffer[sc.position:], p) + sc.position += len(p) + return len(p), nil +} + +func (sc *WriterSeeker) Seek(offset int64, whence int) (int64, error) { + newPos, offs := 0, int(offset) + switch whence { + case io.SeekStart: + newPos = offs + case io.SeekCurrent: + newPos = sc.position + offs + case io.SeekEnd: + newPos = len(sc.buffer) + offs + } + if newPos < 0 { + return 0, errors.New("negative result pos") + } + sc.position = newPos + return int64(newPos), nil +} + +func (sc *WriterSeeker) WriteTo(w io.Writer) (int64, error) { + i, err := w.Write(sc.buffer) + return int64(i), err +} + +func (sc *WriterSeeker) Buffer() []byte { + b := make([]byte, len(sc.buffer)) + copy(b, sc.buffer) + return b +} diff --git a/server/pkg/writer/seeker_closer_test.go b/server/pkg/writer/seeker_closer_test.go new file mode 100644 index 000000000..1eeed3c2c --- /dev/null +++ b/server/pkg/writer/seeker_closer_test.go @@ -0,0 +1,124 @@ +package writer + +import ( + "bytes" + "errors" + "io" + "testing" + + "github.com/stretchr/testify/assert" +) + +var _ io.WriteSeeker = (*WriterSeeker)(nil) + +//reference: https://github.com/orcaman/writerseeker/blob/master/writerseeker_test.go + +func TestWrite(t *testing.T) { + tests := []struct { + Name string + Input []byte + WS *WriterSeeker + ExpectedBuffer []byte + ExpectedPosition int + err error + }{ + { + Name: "write a string", + Input: []byte("xxxx"), + WS: &WriterSeeker{}, + ExpectedBuffer: []byte("xxxx"), + ExpectedPosition: 4, + err: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + n, err := tt.WS.Write(tt.Input) + if tt.err == nil { + assert.Equal(t, tt.ExpectedBuffer, 
tt.WS.Buffer()) + assert.Equal(t, tt.ExpectedPosition, n) + } else { + assert.Equal(t, tt.err, err) + } + }) + } +} + +func TestSeek(t *testing.T) { + ws := &WriterSeeker{} + _, _ = ws.Write([]byte("xxxxxx")) + + tests := []struct { + Name string + WS *WriterSeeker + Whence int + Offset, ExpectedPosition int64 + err error + }{ + { + Name: "whence start", + WS: ws, + Offset: 1, + Whence: 0, // could use io.SeekStart as well + ExpectedPosition: 1, + err: nil, + }, + { + Name: "whence current position", + WS: ws, + Offset: 1, + Whence: 1, + ExpectedPosition: 2, + err: nil, + }, + { + Name: "end position", + WS: ws, + Offset: 1, + Whence: 2, + ExpectedPosition: 7, + err: nil, + }, + { + Name: "fail negative position", + WS: ws, + Offset: -100, + Whence: 0, + ExpectedPosition: 0, + err: errors.New("negative result pos"), + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + // This test should be sequential + + n, err := tt.WS.Seek(tt.Offset, tt.Whence) + if tt.err == nil { + assert.Equal(t, tt.ExpectedPosition, n) + } else { + assert.Equal(t, err, tt.err) + } + }) + } +} + +func TestWriterSeeker_WriteTo(t *testing.T) { + ws := &WriterSeeker{} + buf := bytes.NewBufferString("") + _, _ = ws.Write([]byte("xxxx")) + n, err := ws.WriteTo(buf) + assert.NoError(t, err) + assert.Equal(t, int64(4), n) + assert.Equal(t, "xxxx", buf.String()) +} + +func TestWriterSeeker_Buffer(t *testing.T) { + ws := &WriterSeeker{} + _, _ = ws.Write([]byte("xxxx")) + assert.Equal(t, []byte("xxxx"), ws.Buffer()) +} diff --git a/server/schema.graphql b/server/schema.graphql new file mode 100644 index 000000000..75910a0c9 --- /dev/null +++ b/server/schema.graphql @@ -0,0 +1,1696 @@ +# Built-in + +scalar Upload +scalar Any + +directive @goModel( + model: String + models: [String!] 
+) on OBJECT | INPUT_OBJECT | SCALAR | ENUM | INTERFACE | UNION + +directive @goField( + forceResolver: Boolean + name: String +) on INPUT_FIELD_DEFINITION | FIELD_DEFINITION + +# Meta Type + +scalar Cursor + +interface Node { + id: ID! +} + +enum NodeType { + ASSET + USER + TEAM + PROJECT + PLUGIN + SCENE + PROPERTY_SCHEMA + PROPERTY + DATASET_SCHEMA + DATASET + LAYER_GROUP + LAYER_ITEM +} + +type PageInfo { + startCursor: Cursor + endCursor: Cursor + hasNextPage: Boolean! + hasPreviousPage: Boolean! +} + +# Basic types + +scalar DateTime +scalar URL +scalar Lang +scalar FileSize +scalar TranslatedString + +type LatLng { + lat: Float! + lng: Float! +} + +type LatLngHeight { + lat: Float! + lng: Float! + height: Float! +} + +type Camera { + lat: Float! + lng: Float! + altitude: Float! + heading: Float! + pitch: Float! + roll: Float! + fov: Float! +} + +type Typography { + fontFamily: String + fontWeight: String + fontSize: Int + color: String + textAlign: TextAlign + bold: Boolean + italic: Boolean + underline: Boolean +} + +type Rect { + west: Float! + south: Float! + east: Float! + north: Float! +} + +input Pagination{ + first: Int + last: Int + after: Cursor + before: Cursor +} + +enum TextAlign { + LEFT + CENTER + RIGHT + JUSTIFY + JUSTIFY_ALL +} + +enum ValueType { + BOOL + NUMBER + STRING + REF + URL + LATLNG + LATLNGHEIGHT + CAMERA + TYPOGRAPHY + COORDINATES + POLYGON + RECT +} + +enum ListOperation { + ADD + MOVE + REMOVE +} + +enum Theme { + DEFAULT + LIGHT + DARK +} + +# Asset + +type Asset implements Node { + id: ID! + createdAt: DateTime! + teamId: ID! + name: String! + size: FileSize! + url: String! + contentType: String! + team: Team @goField(forceResolver: true) +} + +enum AssetSortType { + DATE + SIZE + NAME +} + +# User + +type User implements Node { + id: ID! + name: String! + email: String! +} + +type Me { + id: ID! + name: String! + email: String! + lang: Lang! + theme: Theme! + myTeamId: ID! + auths: [String!]! + teams: [Team!]! 
@goField(forceResolver: true) + myTeam: Team! @goField(forceResolver: true) +} + +type ProjectAliasAvailability { + alias: String! + available: Boolean! +} + +type Team implements Node { + id: ID! + name: String! + members: [TeamMember!]! + personal: Boolean! + assets( + first: Int + last: Int + after: Cursor + before: Cursor + ): AssetConnection! @goField(forceResolver: true) + projects( + includeArchived: Boolean + first: Int + last: Int + after: Cursor + before: Cursor + ): ProjectConnection! @goField(forceResolver: true) +} + +type TeamMember { + userId: ID! + role: Role! + user: User @goField(forceResolver: true) +} + +enum Role { + # a role who can read project + READER + # a role who can read and write project + WRITER + # a eole who can have full controll of project + OWNER +} + +# Project + +type Project implements Node { + id: ID! + isArchived: Boolean! + isBasicAuthActive: Boolean! + basicAuthUsername: String! + basicAuthPassword: String! + createdAt: DateTime! + updatedAt: DateTime! + publishedAt: DateTime + name: String! + description: String! + alias: String! + publicTitle: String! + publicDescription: String! + publicImage: String! + publicNoIndex: Boolean! + imageUrl: URL + teamId: ID! + visualizer: Visualizer! + publishmentStatus: PublishmentStatus! + team: Team @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) +} + +enum Visualizer { + CESIUM +} + +enum PublishmentStatus { + PUBLIC + LIMITED + PRIVATE +} + +# Plugin + +type Plugin { + id: ID! + sceneId: ID + name: String! + version: String! + description: String! + author: String! + repositoryUrl: String! + propertySchemaId: ID + extensions: [PluginExtension!]! + scenePlugin(sceneId: ID): ScenePlugin + allTranslatedDescription: TranslatedString + allTranslatedName: TranslatedString + scene: Scene @goField(forceResolver: true) + translatedName(lang: Lang): String! @goField(forceResolver: true) + translatedDescription(lang: Lang): String! 
@goField(forceResolver: true) + propertySchema: PropertySchema @goField(forceResolver: true) +} + +enum WidgetAreaAlign { + START + CENTERED + END +} + +enum WidgetZoneType { + INNER + OUTER +} + +enum WidgetSectionType { + LEFT + CENTER + RIGHT +} + +enum WidgetAreaType { + TOP + MIDDLE + BOTTOM +} + +type WidgetLocation { + zone: WidgetZoneType! + section: WidgetSectionType! + area: WidgetAreaType! +} + +type WidgetExtendable { + vertically: Boolean! + horizontally: Boolean! +} + +type WidgetLayout { + extendable: WidgetExtendable! + extended: Boolean! + floating: Boolean! + defaultLocation: WidgetLocation +} + +enum PluginExtensionType { + PRIMITIVE + WIDGET + BLOCK + VISUALIZER + INFOBOX +} + +type PluginExtension { + extensionId: ID! + pluginId: ID! + type: PluginExtensionType! + name: String! + description: String! + icon: String! + singleOnly: Boolean + widgetLayout: WidgetLayout + visualizer: Visualizer + propertySchemaId: ID! + allTranslatedName: TranslatedString + allTranslatedDescription: TranslatedString + plugin: Plugin @goField(forceResolver: true) + sceneWidget(sceneId: ID!): SceneWidget @goField(forceResolver: true) + propertySchema: PropertySchema @goField(forceResolver: true) + translatedName(lang: Lang): String! @goField(forceResolver: true) + translatedDescription(lang: Lang): String! @goField(forceResolver: true) +} + +# Scene + +type Scene implements Node { + id: ID! + projectId: ID! + teamId: ID! + propertyId: ID! + createdAt: DateTime! + updatedAt: DateTime! + rootLayerId: ID! + widgets: [SceneWidget!]! + plugins: [ScenePlugin!]! + widgetAlignSystem: WidgetAlignSystem + dynamicDatasetSchemas: [DatasetSchema!]! + project: Project @goField(forceResolver: true) + team: Team @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + rootLayer: LayerGroup @goField(forceResolver: true) + datasetSchemas( + first: Int + last: Int + after: Cursor + before: Cursor + ): DatasetSchemaConnection! 
@goField(forceResolver: true) + tagIds: [ID!]! + tags: [Tag!]! @goField(forceResolver: true) + clusters: [Cluster!]! +} + +type SceneWidget { + id: ID! + pluginId: ID! + extensionId: ID! + propertyId: ID! + enabled: Boolean! + extended: Boolean! + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + property: Property @goField(forceResolver: true) +} + +type ScenePlugin { + pluginId: ID! + propertyId: ID + plugin: Plugin @goField(forceResolver: true) + property: Property @goField(forceResolver: true) +} + +type WidgetAlignSystem { + inner: WidgetZone + outer: WidgetZone +} + +type WidgetZone { + left: WidgetSection + center: WidgetSection + right: WidgetSection +} + +type WidgetSection { + top: WidgetArea + middle: WidgetArea + bottom: WidgetArea +} + +type WidgetArea { + widgetIds: [ID!]! + align: WidgetAreaAlign! +} + +# Property + +type PropertySchema { + id: ID! + groups: [PropertySchemaGroup!]! + linkableFields: PropertyLinkableFields! +} + +type PropertyLinkableFields { + schemaId: ID! + latlng: ID + url: ID + latlngField: PropertySchemaField @goField(forceResolver: true) + urlField: PropertySchemaField @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) +} + +type PropertySchemaGroup { + schemaGroupId: ID! + schemaId: ID! + fields: [PropertySchemaField!]! + isList: Boolean! + isAvailableIf: PropertyCondition + title: String + allTranslatedTitle: TranslatedString + representativeFieldId: ID + representativeField: PropertySchemaField + schema: PropertySchema @goField(forceResolver: true) + translatedTitle(lang: Lang): String! @goField(forceResolver: true) +} + +type PropertySchemaField { + fieldId: ID! + type: ValueType! + title: String! + description: String! + prefix: String + suffix: String + defaultValue: Any + ui: PropertySchemaFieldUI + min: Float + max: Float + choices: [PropertySchemaFieldChoice!] 
+ isAvailableIf: PropertyCondition + allTranslatedTitle: TranslatedString + allTranslatedDescription: TranslatedString + translatedTitle(lang: Lang): String! @goField(forceResolver: true) + translatedDescription(lang: Lang): String! @goField(forceResolver: true) +} + +enum PropertySchemaFieldUI { + LAYER + MULTILINE + SELECTION + COLOR + RANGE + SLIDER + IMAGE + VIDEO + FILE + CAMERA_POSE + DATETIME +} + +type PropertySchemaFieldChoice { + key: String! + title: String! + icon: String + allTranslatedTitle: TranslatedString + translatedTitle(lang: Lang): String! @goField(forceResolver: true) +} + +type PropertyCondition { + fieldId: ID! + type: ValueType! + value: Any +} + +type Property implements Node { + id: ID! + schemaId: ID! + items: [PropertyItem!]! + schema: PropertySchema @goField(forceResolver: true) + layer: Layer @goField(forceResolver: true) + merged: MergedProperty @goField(forceResolver: true) +} + +union PropertyItem = PropertyGroup | PropertyGroupList + +type PropertyGroup { + id: ID! + schemaId: ID! + schemaGroupId: ID! + fields: [PropertyField!]! + schema: PropertySchema @goField(forceResolver: true) + schemaGroup: PropertySchemaGroup @goField(forceResolver: true) +} + +type PropertyGroupList { + id: ID! + schemaId: ID! + schemaGroupId: ID! + groups: [PropertyGroup!]! + schema: PropertySchema @goField(forceResolver: true) + schemaGroup: PropertySchemaGroup @goField(forceResolver: true) +} + +type PropertyField { + id: String! + parentId: ID! + schemaId: ID! + fieldId: ID! + links: [PropertyFieldLink!] + type: ValueType! + value: Any + parent: Property @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + field: PropertySchemaField @goField(forceResolver: true) + actualValue: Any @goField(forceResolver: true) +} + +type PropertyFieldLink { + datasetId: ID + datasetSchemaId: ID! + datasetSchemaFieldId: ID! 
+ dataset: Dataset @goField(forceResolver: true) + datasetField: DatasetField @goField(forceResolver: true) + datasetSchema: DatasetSchema @goField(forceResolver: true) + datasetSchemaField: DatasetSchemaField @goField(forceResolver: true) +} + +type MergedProperty { + originalId: ID + parentId: ID + # note: schemaId will not always be set + schemaId: ID + linkedDatasetId: ID + original: Property @goField(forceResolver: true) + parent: Property @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + groups: [MergedPropertyGroup!]! @goField(forceResolver: true) +} + +type MergedPropertyGroup { + originalPropertyId: ID + parentPropertyId: ID + originalId: ID + parentId: ID + schemaGroupId: ID! + # note: schemaId will not always be set + schemaId: ID + linkedDatasetId: ID + fields: [MergedPropertyField!]! + groups: [MergedPropertyGroup!]! + originalProperty: Property @goField(forceResolver: true) + parentProperty: Property @goField(forceResolver: true) + original: PropertyGroup @goField(forceResolver: true) + parent: PropertyGroup @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) +} + +type MergedPropertyField { + schemaId: ID! + fieldId: ID! + value: Any + type: ValueType! + links: [PropertyFieldLink!] + overridden: Boolean! + schema: PropertySchema @goField(forceResolver: true) + field: PropertySchemaField @goField(forceResolver: true) + actualValue: Any @goField(forceResolver: true) +} + +# Dataset + +type DatasetSchema implements Node { + id: ID! + source: String! + name: String! + sceneId: ID! + fields: [DatasetSchemaField!]! + totalCount: Int! + representativeFieldId: ID + dynamic: Boolean + datasets( + first: Int + last: Int + after: Cursor + before: Cursor + ): DatasetConnection! 
@goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + representativeField: DatasetSchemaField @goField(forceResolver: true) +} + +type DatasetSchemaField implements Node { + id: ID! + source: String! + name: String! + type: ValueType! + schemaId: ID! + refId: ID + schema: DatasetSchema @goField(forceResolver: true) + ref: DatasetSchema @goField(forceResolver: true) +} + +type Dataset implements Node { + id: ID! + source: String! + schemaId: ID! + fields: [DatasetField!]! + schema: DatasetSchema @goField(forceResolver: true) + name: String @goField(forceResolver: true) +} + +type DatasetField { + fieldId: ID! + schemaId: ID! + source: String! + type: ValueType! + value: Any + schema: DatasetSchema @goField(forceResolver: true) + field: DatasetSchemaField @goField(forceResolver: true) + valueRef: Dataset @goField(forceResolver: true) +} + +# Layer + +interface Layer { + id: ID! + sceneId: ID! + name: String! + isVisible: Boolean! + propertyId: ID + pluginId: ID + extensionId: ID + infobox: Infobox + # parentId will not be always set + parentId: ID + parent: LayerGroup + property: Property + plugin: Plugin + extension: PluginExtension + scenePlugin: ScenePlugin + tags: [LayerTag!]! +} + +enum LayerEncodingFormat { + KML + CZML + GEOJSON + SHAPE + REEARTH +} + +type LayerItem implements Layer { + id: ID! + sceneId: ID! + name: String! + isVisible: Boolean! + propertyId: ID + pluginId: ID + extensionId: ID + infobox: Infobox + # parentId will not be always set + parentId: ID + linkedDatasetId: ID + tags: [LayerTag!]! 
+ parent: LayerGroup @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedLayer @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + scenePlugin: ScenePlugin @goField(forceResolver: true) +} + +type LayerGroup implements Layer { + id: ID! + sceneId: ID! + name: String! + isVisible: Boolean! + propertyId: ID + pluginId: ID + extensionId: ID + infobox: Infobox + # parentId will not be always set + parentId: ID + linkedDatasetSchemaId: ID + root: Boolean! + layerIds: [ID!]! + tags: [LayerTag!]! + parent: LayerGroup @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDatasetSchema: DatasetSchema @goField(forceResolver: true) + layers: [Layer]! @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + scenePlugin: ScenePlugin @goField(forceResolver: true) +} + +type Infobox { + sceneId: ID! + layerId: ID! + propertyId: ID! + fields: [InfoboxField!]! + linkedDatasetId: ID + layer: Layer! @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedInfobox @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) +} + +type InfoboxField { + id: ID! + sceneId: ID! + layerId: ID! + propertyId: ID! + pluginId: ID! + extensionId: ID! + linkedDatasetId: ID + layer: Layer! @goField(forceResolver: true) + infobox: Infobox! 
@goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedInfoboxField @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + scenePlugin: ScenePlugin @goField(forceResolver: true) +} + +interface LayerTag { + tagId: ID! + tag: Tag +} + +type LayerTagItem implements LayerTag { + tagId: ID! + tag: Tag @goField(forceResolver: true) +} + +type LayerTagGroup implements LayerTag { + tagId: ID! + children: [LayerTagItem!]! + tag: Tag @goField(forceResolver: true) +} + +type MergedLayer { + originalId: ID! + parentId: ID + sceneID: ID! + property: MergedProperty + infobox: MergedInfobox + original: LayerItem @goField(forceResolver: true) + parent: LayerGroup @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) +} + +type MergedInfobox { + sceneID: ID! + property: MergedProperty + fields: [MergedInfoboxField!]! + scene: Scene @goField(forceResolver: true) +} + +type MergedInfoboxField { + originalId: ID! + sceneID: ID! + pluginId: ID! + extensionId: ID! + property: MergedProperty + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + scenePlugin: ScenePlugin @goField(forceResolver: true) +} + +interface Tag { + id: ID! + sceneId: ID! + label: String! + layers: [Layer!]! @goField(forceResolver: true) +} + +type TagItem implements Tag { + id: ID! + sceneId: ID! + label: String! + parentId: ID + linkedDatasetID: ID + linkedDatasetSchemaID: ID + linkedDatasetFieldID: ID + linkedDatasetSchema: DatasetSchema @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + linkedDatasetField: DatasetField @goField(forceResolver: true) + parent: TagGroup @goField(forceResolver: true) + layers: [Layer!]! 
@goField(forceResolver: true) +} + +type TagGroup implements Tag { + id: ID! + sceneId: ID! + label: String! + tagIds: [ID!] + tags: [TagItem!]! @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + layers: [Layer!]! @goField(forceResolver: true) +} + +type Cluster { + id: ID! + name: String! + propertyId: ID! + property: Property @goField(forceResolver: true) +} + +# InputType + +input CreateAssetInput { + teamId: ID! + file: Upload! +} + +input RemoveAssetInput { + assetId: ID! +} + +input SignupInput { + lang: Lang + theme: Theme + userId: ID + teamId: ID + secret: String +} + +input UpdateMeInput { + name: String + email: String + lang: Lang + theme: Theme + password: String + passwordConfirmation: String +} + +input RemoveMyAuthInput { + auth: String! +} + +input DeleteMeInput { + userId: ID! +} + +input CreateTeamInput { + name: String! +} + +input UpdateTeamInput { + teamId: ID! + name: String! +} + +input AddMemberToTeamInput { + teamId: ID! + userId: ID! + role: Role! +} + +input RemoveMemberFromTeamInput { + teamId: ID! + userId: ID! +} + +input UpdateMemberOfTeamInput { + teamId: ID! + userId: ID! + role: Role! +} + +input DeleteTeamInput { + teamId: ID! +} + +input CreateProjectInput { + teamId: ID! + visualizer: Visualizer! + name: String + description: String + imageUrl: URL + alias: String + archived: Boolean +} + +input UpdateProjectInput { + projectId: ID! + name: String + description: String + archived: Boolean + isBasicAuthActive: Boolean + basicAuthUsername: String + basicAuthPassword: String + alias: String + imageUrl: URL + publicTitle: String + publicDescription: String + publicImage: String + publicNoIndex: Boolean + deleteImageUrl: Boolean + deletePublicImage: Boolean +} + +input UploadPluginInput { + sceneId: ID! + file: Upload + url: URL +} + +input CreateSceneInput { + projectId: ID! +} + +input PublishProjectInput { + projectId: ID! + alias: String + status: PublishmentStatus! 
+} + +input DeleteProjectInput { + projectId: ID! +} + +input WidgetLocationInput { + zone: WidgetZoneType! + section: WidgetSectionType! + area: WidgetAreaType! +} + +input AddWidgetInput { + sceneId: ID! + pluginId: ID! + extensionId: ID! +} + +input UpdateWidgetInput { + sceneId: ID! + widgetId: ID! + enabled: Boolean + location: WidgetLocationInput + extended: Boolean + index: Int +} + +input UpdateWidgetAlignSystemInput { + sceneId: ID! + location: WidgetLocationInput! + align: WidgetAreaAlign +} + +input RemoveWidgetInput { + sceneId: ID! + widgetId: ID! +} + +input InstallPluginInput { + sceneId: ID! + pluginId: ID! +} + +input UninstallPluginInput { + sceneId: ID! + pluginId: ID! +} + +input UpgradePluginInput { + sceneId: ID! + pluginId: ID! + toPluginId: ID! +} + +input SyncDatasetInput { + sceneId: ID! + url: String! +} + +input UpdatePropertyValueInput { + propertyId: ID! + schemaGroupId: ID + itemId: ID + fieldId: ID! + value: Any + type: ValueType! +} + +input RemovePropertyFieldInput { + propertyId: ID! + schemaGroupId: ID + itemId: ID + fieldId: ID! +} + +input UploadFileToPropertyInput { + propertyId: ID! + schemaGroupId: ID + itemId: ID + fieldId: ID! + file: Upload! +} + +input LinkDatasetToPropertyValueInput { + propertyId: ID! + schemaGroupId: ID + itemId: ID + fieldId: ID! + datasetSchemaIds: [ID!]! + datasetSchemaFieldIds: [ID!]! + datasetIds: [ID!] +} + +input UnlinkPropertyValueInput { + propertyId: ID! + schemaGroupId: ID + itemId: ID + fieldId: ID! +} + +input AddPropertyItemInput { + propertyId: ID! + schemaGroupId: ID! + index: Int + nameFieldValue: Any + nameFieldType: ValueType +} + +input MovePropertyItemInput { + propertyId: ID! + schemaGroupId: ID! + itemId: ID! + index: Int! +} + +input RemovePropertyItemInput { + propertyId: ID! + schemaGroupId: ID! + itemId: ID! +} + +input UpdatePropertyItemInput { + propertyId: ID! + schemaGroupId: ID! + operations: [UpdatePropertyItemOperationInput!]! 
+} + +input UpdatePropertyItemOperationInput { + operation: ListOperation! + itemId: ID + index: Int + nameFieldValue: Any + nameFieldType: ValueType +} + +input AddLayerItemInput { + parentLayerId: ID! + pluginId: ID! + extensionId: ID! + index: Int + name: String + lat: Float + lng: Float +} + +input AddLayerGroupInput { + parentLayerId: ID! + pluginId: ID + extensionId: ID + index: Int + linkedDatasetSchemaID: ID + name: String + representativeFieldId: ID +} + +input RemoveLayerInput { + layerId: ID! +} + +input UpdateLayerInput { + layerId: ID! + name: String + visible: Boolean +} + +input MoveLayerInput { + layerId: ID! + destLayerId: ID + index: Int +} + +input CreateInfoboxInput { + layerId: ID! +} + +input RemoveInfoboxInput { + layerId: ID! +} + +input AddInfoboxFieldInput { + layerId: ID! + pluginId: ID! + extensionId: ID! + index: Int +} + +input MoveInfoboxFieldInput { + layerId: ID! + infoboxFieldId: ID! + index: Int! +} + +input RemoveInfoboxFieldInput { + layerId: ID! + infoboxFieldId: ID! +} + +input UpdateDatasetSchemaInput { + schemaId: ID! + name: String! +} + +input AddDynamicDatasetSchemaInput { + sceneId: ID! +} + +input AddDynamicDatasetInput { + datasetSchemaId: ID! + author: String! + content: String! + lat: Float + lng: Float + target: String +} + +input RemoveDatasetSchemaInput { + schemaId: ID! + force: Boolean +} + +input ImportLayerInput { + layerId: ID! + file: Upload! + format: LayerEncodingFormat! +} + +input ImportDatasetInput { + file: Upload! + sceneId: ID! + datasetSchemaId: ID +} + +input ImportDatasetFromGoogleSheetInput { + accessToken: String! + fileId: String! + sheetName: String! + sceneId: ID! + datasetSchemaId: ID +} + +input AddDatasetSchemaInput { + sceneId: ID! + name: String! + representativefield: ID +} + +input CreateTagItemInput { + sceneId: ID! + label: String! + parent: ID + linkedDatasetSchemaID: ID + linkedDatasetID: ID + linkedDatasetField: ID +} + +input CreateTagGroupInput { + sceneId: ID! + label: String! 
+ tags: [ID!] +} + +input UpdateTagInput { + tagId: ID! + sceneId: ID! + label: String +} + +input AttachTagItemToGroupInput { + itemID: ID! + groupID: ID! +} + +input DetachTagItemFromGroupInput { + itemID: ID! + groupID: ID! +} + +input AttachTagToLayerInput { + tagID: ID! + layerID: ID! +} + +input DetachTagFromLayerInput { + tagID: ID! + layerID: ID! +} + +input RemoveTagInput { + tagID: ID! +} + +input AddClusterInput { + sceneId: ID! + name: String! +} + +input UpdateClusterInput { + clusterId: ID! + sceneId: ID! + name: String + propertyId: ID +} + +input RemoveClusterInput { + clusterId: ID! + sceneId: ID! +} + +# Payload + +type CreateAssetPayload { + asset: Asset! +} + +type RemoveAssetPayload { + assetId: ID! +} + +type UpdateMePayload { + me: Me! +} + +type SignupPayload { + user: User! + team: Team! +} + +type DeleteMePayload { + userId: ID! +} + +type CreateTeamPayload { + team: Team! +} + +type UpdateTeamPayload { + team: Team! +} + +type AddMemberToTeamPayload { + team: Team! +} + +type RemoveMemberFromTeamPayload { + team: Team! +} + +type UpdateMemberOfTeamPayload { + team: Team! +} + +type DeleteTeamPayload { + teamId: ID! +} + +type ProjectPayload { + project: Project! +} + +type DeleteProjectPayload { + projectId: ID! +} + +type UploadPluginPayload { + plugin: Plugin! + scene: Scene! + scenePlugin: ScenePlugin! +} + +type CreateScenePayload { + scene: Scene! +} + +type AddWidgetPayload { + scene: Scene! + sceneWidget: SceneWidget! +} + +type UpdateWidgetPayload { + scene: Scene! + sceneWidget: SceneWidget! +} + +type UpdateWidgetAlignSystemPayload { + scene: Scene! +} + +type RemoveWidgetPayload { + scene: Scene! + widgetId: ID! +} + +type InstallPluginPayload { + scene: Scene! + scenePlugin: ScenePlugin! +} + +type UninstallPluginPayload { + pluginId: ID! + scene: Scene! +} + +type UpgradePluginPayload { + scene: Scene! + scenePlugin: ScenePlugin! +} + +type SyncDatasetPayload { + sceneId: ID! + url: String! + datasetSchema: [DatasetSchema!]! 
+ dataset: [Dataset!]! +} + +type PropertyFieldPayload { + property: Property! + propertyField: PropertyField +} + +type PropertyItemPayload { + property: Property! + propertyItem: PropertyItem +} + +type AddLayerItemPayload { + layer: LayerItem! + parentLayer: LayerGroup! + index: Int +} + +type AddLayerGroupPayload { + layer: LayerGroup! + parentLayer: LayerGroup! + index: Int +} + +type RemoveLayerPayload { + layerId: ID! + parentLayer: LayerGroup! +} + +type UpdateLayerPayload { + layer: Layer! +} + +type MoveLayerPayload { + layerId: ID! + fromParentLayer: LayerGroup! + toParentLayer: LayerGroup! + index: Int! +} + +type CreateInfoboxPayload { + layer: Layer! +} + +type RemoveInfoboxPayload { + layer: Layer! +} + +type AddInfoboxFieldPayload { + infoboxField: InfoboxField! + layer: Layer! +} + +type MoveInfoboxFieldPayload { + infoboxFieldId: ID! + layer: Layer! + index: Int! +} + +type RemoveInfoboxFieldPayload { + infoboxFieldId: ID! + layer: Layer! +} + +type UpdateDatasetSchemaPayload { + datasetSchema: DatasetSchema +} + +type RemoveDatasetSchemaPayload { + schemaId: ID! +} + +type AddDynamicDatasetSchemaPayload { + datasetSchema: DatasetSchema +} + +type AddDynamicDatasetPayload { + datasetSchema: DatasetSchema + dataset: Dataset +} + +type ImportLayerPayload { + layers: [Layer!]! + parentLayer: LayerGroup! +} + +type ImportDatasetPayload { + datasetSchema: DatasetSchema! +} + +type AddDatasetSchemaPayload { + datasetSchema: DatasetSchema +} + +type CreateTagItemPayload { + tag: TagItem! + parent: TagGroup +} + +type CreateTagGroupPayload { + tag: TagGroup! +} + +type AttachTagItemToGroupPayload { + tag: TagGroup! +} + +type DetachTagItemFromGroupPayload { + tag: TagGroup! +} + +type UpdateTagPayload { + tag: Tag! +} + +type AttachTagToLayerPayload { + layer: Layer! +} + +type DetachTagFromLayerPayload { + layer: Layer! +} + +type RemoveTagPayload { + tagId: ID! + updatedLayers: [Layer!]! +} + +type AddClusterPayload { + scene: Scene! + cluster: Cluster! 
+} + +type UpdateClusterPayload { + scene: Scene! + cluster: Cluster! +} + +type RemoveClusterPayload { + scene: Scene! + clusterId: ID! +} + +# Connection + +type AssetConnection { + edges: [AssetEdge!]! + nodes: [Asset]! + pageInfo: PageInfo! + totalCount: Int! +} + +type AssetEdge { + cursor: Cursor! + node: Asset +} + +type ProjectConnection { + edges: [ProjectEdge!]! + nodes: [Project]! + pageInfo: PageInfo! + totalCount: Int! +} + +type ProjectEdge { + cursor: Cursor! + node: Project +} + +type DatasetSchemaConnection { + edges: [DatasetSchemaEdge!]! + nodes: [DatasetSchema]! + pageInfo: PageInfo! + totalCount: Int! +} + +type DatasetSchemaEdge { + cursor: Cursor! + node: DatasetSchema +} + +type DatasetConnection { + edges: [DatasetEdge!]! + nodes: [Dataset]! + pageInfo: PageInfo! + totalCount: Int! +} + +type DatasetEdge { + cursor: Cursor! + node: Dataset +} + +# Query + +type Query { + me: Me + node(id: ID!, type: NodeType!): Node + nodes(id: [ID!]!, type: NodeType!): [Node]! + propertySchema(id: ID!): PropertySchema + propertySchemas(id: [ID!]!): [PropertySchema!]! + plugin(id: ID!): Plugin + plugins(id: [ID!]!): [Plugin!]! + layer(id: ID!): Layer + scene(projectId: ID!): Scene + assets( + teamId: ID! + keyword: String + sort: AssetSortType + pagination: Pagination + ): AssetConnection! + projects( + teamId: ID! + includeArchived: Boolean + first: Int + last: Int + after: Cursor + before: Cursor + ): ProjectConnection! + datasetSchemas( + sceneId: ID! + first: Int + last: Int + after: Cursor + before: Cursor + ): DatasetSchemaConnection! + datasets( + datasetSchemaId: ID! + first: Int + last: Int + after: Cursor + before: Cursor + ): DatasetConnection! + dynamicDatasetSchemas(sceneId: ID!): [DatasetSchema!]! + searchUser(nameOrEmail: String!): User + checkProjectAlias(alias: String!): ProjectAliasAvailability! 
+} + +# Mutation + +type Mutation { + # Asset + createAsset(input: CreateAssetInput!): CreateAssetPayload + removeAsset(input: RemoveAssetInput!): RemoveAssetPayload + + # User + signup(input: SignupInput!): SignupPayload + updateMe(input: UpdateMeInput!): UpdateMePayload + removeMyAuth(input: RemoveMyAuthInput!): UpdateMePayload + deleteMe(input: DeleteMeInput!): DeleteMePayload + + # Team + createTeam(input: CreateTeamInput!): CreateTeamPayload + deleteTeam(input: DeleteTeamInput!): DeleteTeamPayload + updateTeam(input: UpdateTeamInput!): UpdateTeamPayload + addMemberToTeam(input: AddMemberToTeamInput!): AddMemberToTeamPayload + removeMemberFromTeam( + input: RemoveMemberFromTeamInput! + ): RemoveMemberFromTeamPayload + updateMemberOfTeam(input: UpdateMemberOfTeamInput!): UpdateMemberOfTeamPayload + + # Project + createProject(input: CreateProjectInput!): ProjectPayload + updateProject(input: UpdateProjectInput!): ProjectPayload + publishProject(input: PublishProjectInput!): ProjectPayload + deleteProject(input: DeleteProjectInput!): DeleteProjectPayload + + # Scene + createScene(input: CreateSceneInput!): CreateScenePayload + addWidget(input: AddWidgetInput!): AddWidgetPayload + updateWidget(input: UpdateWidgetInput!): UpdateWidgetPayload + updateWidgetAlignSystem( + input: UpdateWidgetAlignSystemInput! + ): UpdateWidgetAlignSystemPayload + removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload + installPlugin(input: InstallPluginInput!): InstallPluginPayload + uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload + uploadPlugin(input: UploadPluginInput!): UploadPluginPayload + upgradePlugin(input: UpgradePluginInput!): UpgradePluginPayload + addCluster(input: AddClusterInput!): AddClusterPayload + updateCluster(input: UpdateClusterInput!): UpdateClusterPayload + removeCluster(input: RemoveClusterInput!): RemoveClusterPayload + + # Dataset + updateDatasetSchema( + input: UpdateDatasetSchemaInput! 
+ ): UpdateDatasetSchemaPayload + syncDataset(input: SyncDatasetInput!): SyncDatasetPayload + addDynamicDatasetSchema( + input: AddDynamicDatasetSchemaInput! + ): AddDynamicDatasetSchemaPayload + addDynamicDataset(input: AddDynamicDatasetInput!): AddDynamicDatasetPayload + removeDatasetSchema( + input: RemoveDatasetSchemaInput! + ): RemoveDatasetSchemaPayload + importDataset(input: ImportDatasetInput!): ImportDatasetPayload + importDatasetFromGoogleSheet( + input: ImportDatasetFromGoogleSheetInput! + ): ImportDatasetPayload + addDatasetSchema(input: AddDatasetSchemaInput!): AddDatasetSchemaPayload + + # Property + updatePropertyValue(input: UpdatePropertyValueInput!): PropertyFieldPayload + removePropertyField(input: RemovePropertyFieldInput!): PropertyFieldPayload + uploadFileToProperty(input: UploadFileToPropertyInput!): PropertyFieldPayload + linkDatasetToPropertyValue( + input: LinkDatasetToPropertyValueInput! + ): PropertyFieldPayload + unlinkPropertyValue(input: UnlinkPropertyValueInput!): PropertyFieldPayload + addPropertyItem(input: AddPropertyItemInput!): PropertyItemPayload + movePropertyItem(input: MovePropertyItemInput!): PropertyItemPayload + removePropertyItem(input: RemovePropertyItemInput!): PropertyItemPayload + updatePropertyItems(input: UpdatePropertyItemInput!): PropertyItemPayload + + # Layer + addLayerItem(input: AddLayerItemInput!): AddLayerItemPayload + addLayerGroup(input: AddLayerGroupInput!): AddLayerGroupPayload + removeLayer(input: RemoveLayerInput!): RemoveLayerPayload + updateLayer(input: UpdateLayerInput!): UpdateLayerPayload + moveLayer(input: MoveLayerInput!): MoveLayerPayload + createInfobox(input: CreateInfoboxInput!): CreateInfoboxPayload + removeInfobox(input: RemoveInfoboxInput!): RemoveInfoboxPayload + addInfoboxField(input: AddInfoboxFieldInput!): AddInfoboxFieldPayload + moveInfoboxField(input: MoveInfoboxFieldInput!): MoveInfoboxFieldPayload + removeInfoboxField(input: RemoveInfoboxFieldInput!): RemoveInfoboxFieldPayload + 
importLayer(input: ImportLayerInput!): ImportLayerPayload + attachTagToLayer(input: AttachTagToLayerInput!): AttachTagToLayerPayload + detachTagFromLayer(input: DetachTagFromLayerInput!): DetachTagFromLayerPayload + + # Tag + createTagItem(input: CreateTagItemInput!): CreateTagItemPayload + createTagGroup(input: CreateTagGroupInput!): CreateTagGroupPayload + attachTagItemToGroup( + input: AttachTagItemToGroupInput! + ): AttachTagItemToGroupPayload + detachTagItemFromGroup( + input: DetachTagItemFromGroupInput! + ): DetachTagItemFromGroupPayload + updateTag(input: UpdateTagInput!): UpdateTagPayload + removeTag(input: RemoveTagInput!): RemoveTagPayload +} + +schema { + query: Query + mutation: Mutation +} diff --git a/server/schemas/plugin_manifest.json b/server/schemas/plugin_manifest.json new file mode 100644 index 000000000..0d2b089fd --- /dev/null +++ b/server/schemas/plugin_manifest.json @@ -0,0 +1,443 @@ +{ + "$id": "https://reearth.io/schemas/plugin_manifest.json", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Re:Earth plugin manifest", + "definitions": { + "id": { + "$id": "#id", + "type": "string", + "pattern": "^[A-Za-z]{1}[\\w-:.]{0,}$" + }, + "id?": { + "$id": "#id?", + "type": [ + "string", + "null" + ], + "pattern": "^[A-Za-z]{1}[\\w-:.]{0,}$" + }, + "valuetype": { + "$id": "#valuetype", + "type": "string", + "enum": [ + "bool", + "number", + "string", + "url", + "latlng", + "latlngheight", + "camera", + "typography", + "coordinates", + "polygon", + "rect", + "ref" + ] + }, + "propertyPointer": { + "$id": "#propertyPointer", + "type": [ + "object", + "null" + ], + "properties": { + "schemaGroupId": { + "type": "string" + }, + "fieldId": { + "type": "string" + } + }, + "required": [ + "schemaGroupId", + "fieldId" + ], + "additionalProperties": false + }, + "propertyLinkableFields": { + "$id": "#propertyLinkableFields", + "type": [ + "object", + "null" + ], + "properties": { + "latlng": { + "$ref": "#/definitions/propertyPointer" + }, 
+ "url": { + "$ref": "#/definitions/propertyPointer" + } + }, + "additionalProperties": false + }, + "propertyCondition": { + "$id": "#propertyCondition", + "type": [ + "object", + "null" + ], + "properties": { + "field": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/valuetype" + }, + "value": {} + }, + "required": [ + "field", + "type", + "value" + ], + "additionalProperties": false + }, + "propertySchemaField": { + "$id": "#propertySchemaField", + "type": "object", + "properties": { + "id": { + "$ref": "#/definitions/id" + }, + "title": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "type": { + "$ref": "#/definitions/valuetype" + }, + "prefix": { + "type": [ + "string", + "null" + ] + }, + "suffix": { + "type": [ + "string", + "null" + ] + }, + "defaultValue": {}, + "ui": { + "type": [ + "string", + "null" + ], + "enum": [ + "layer", + "color", + "multiline", + "selection", + "buttons", + "range", + "slider", + "image", + "video", + "file", + "camera_pose", + "datetime" + ] + }, + "min": { + "type": [ + "number", + "null" + ] + }, + "max": { + "type": [ + "number", + "null" + ] + }, + "choices": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "label": { + "type": "string" + }, + "icon": { + "type": "string" + } + }, + "required": [ + "key" + ], + "additionalProperties": false + } + }, + "availableIf": { + "$ref": "#/definitions/propertyCondition" + } + }, + "required": [ + "id", + "type", + "title" + ], + "additionalProperties": false + }, + "propertySchemaGroup": { + "$id": "#propertySchemaGroup", + "type": "object", + "properties": { + "id": { + "$ref": "#/definitions/id" + }, + "title": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "list": { + "type": "boolean" + }, + "availableIf": { + "$ref": "#/definitions/propertyCondition" + }, + "representativeField": { + "$ref": 
"#/definitions/id?" + }, + "fields": { + "type": "array", + "items": { + "$ref": "#/definitions/propertySchemaField" + } + } + }, + "required": [ + "id", + "title" + ], + "additionalProperties": false + }, + "propertySchema": { + "$id": "#propertySchema", + "type": [ + "object", + "null" + ], + "properties": { + "version": { + "type": "number" + }, + "linkable": { + "$ref": "#/definitions/propertyLinkableFields" + }, + "groups": { + "type": "array", + "items": { + "$ref": "#/definitions/propertySchemaGroup" + } + } + }, + "additionalProperties": false + }, + "location": { + "$id": "#location", + "type": [ + "object", + "null" + ], + "properties": { + "zone": { + "type": "string", + "enum": [ + "inner", + "outer" + ] + }, + "section": { + "type": "string", + "enum": [ + "left", + "center", + "right" + ] + }, + "area": { + "type": "string", + "enum": [ + "top", + "middle", + "bottom" + ] + } + } + }, + "extension": { + "$id": "#extension", + "type": "object", + "properties": { + "id": { + "$ref": "#/definitions/id" + }, + "name": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "icon": { + "type": [ + "string", + "null" + ] + }, + "visualizer": { + "type": [ + "string", + "null" + ], + "enum": [ + "cesium" + ] + }, + "type": { + "type": "string", + "enum": [ + "primitive", + "widget", + "block", + "visualizer", + "infobox", + "cluster" + ] + }, + "singleOnly": { + "type": [ + "boolean", + "null" + ] + }, + "widgetLayout": { + "type": [ + "object", + "null" + ], + "properties": { + "extendable": { + "type": [ + "object", + "null" + ], + "properties": { + "vertically": { + "type": [ + "boolean", + "null" + ] + }, + "horizontally": { + "type": [ + "boolean", + "null" + ] + } + } + }, + "extended": { + "type": [ + "boolean", + "null" + ] + }, + "floating": { + "type": "boolean" + }, + "defaultLocation": { + "$ref": "#/definitions/location" + } + } + }, + "schema": { + "$ref": "#/definitions/propertySchema" + } + }, + "required": [ 
+ "id", + "name", + "type" + ], + "additionalProperties": false + }, + "root": { + "$id": "#root", + "type": "object", + "properties": { + "id": { + "$ref": "#/definitions/id" + }, + "name": { + "type": "string" + }, + "system": { + "type": "boolean" + }, + "version": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "repository": { + "type": [ + "string", + "null" + ] + }, + "author": { + "type": [ + "string", + "null" + ] + }, + "main": { + "type": [ + "string", + "null" + ] + }, + "extensions": { + "type": "array", + "items": { + "$ref": "#/definitions/extension" + } + }, + "schema": { + "$ref": "#/definitions/propertySchema" + } + }, + "required": [ + "id", + "name" + ], + "additionalProperties": false + } + }, + "$ref": "#/definitions/root" +} diff --git a/server/schemas/plugin_manifest_translation.json b/server/schemas/plugin_manifest_translation.json new file mode 100644 index 000000000..42e752458 --- /dev/null +++ b/server/schemas/plugin_manifest_translation.json @@ -0,0 +1,138 @@ +{ + "$id": "https://reearth.io/schemas/plugin_manifest_translation.json", + "$schema": "http://json-schema.org/draft-04/schema", + "title": "Re:Earth plugin manifest translation", + "definitions": { + "propertySchemaField": { + "$id": "#propertySchemaField", + "type": "object", + "additionalProperties": false, + "properties": { + "title": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "prefix": { + "type": [ + "string", + "null" + ] + }, + "suffix": { + "type": [ + "string", + "null" + ] + }, + "choices": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^[A-Za-z]{1}[\\w-:.]{0,}$": { + "type": "string" + } + } + } + } + }, + "propertySchemaGroup": { + "$id": "#propertySchemaGroup", + "type": "object", + "additionalProperties": false, + "properties": { + "title": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + 
"null" + ] + }, + "fields": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^[A-Za-z]{1}[\\w-:.]{0,}$": { + "$ref": "#/definitions/propertySchemaField" + } + } + } + } + }, + "propertySchema": { + "$id": "#propertySchema", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^[A-Za-z]{1}[\\w-:.]{0,}$": { + "$ref": "#/definitions/propertySchemaGroup" + } + } + }, + "extension": { + "$id": "#extension", + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "propertySchema": { + "$ref": "#/definitions/propertySchema" + } + } + }, + "root": { + "$id": "#root", + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "extensions": { + "type": "object", + "patternProperties": { + "^[A-Za-z]{1}[\\w-:.]{0,}$": { + "$ref": "#/definitions/extension" + } + } + }, + "schema": { + "$ref": "#/definitions/propertySchema" + } + } + } + }, + "$ref": "#/definitions/root" +} diff --git a/server/tools.go b/server/tools.go new file mode 100644 index 000000000..24546391f --- /dev/null +++ b/server/tools.go @@ -0,0 +1,9 @@ +//go:build tools + +package main + +import ( + _ "github.com/99designs/gqlgen" + _ "github.com/idubinskiy/schematyper" + _ "github.com/vektah/dataloaden" +) diff --git a/server/tools/cmd/gen/flag.go b/server/tools/cmd/gen/flag.go new file mode 100644 index 000000000..5211e1b50 --- /dev/null +++ b/server/tools/cmd/gen/flag.go @@ -0,0 +1,123 @@ +package main + +import ( + "fmt" +) + +type Flags map[string][]string + +func (f Flags) Bool(keys ...string) bool { + for _, k := range keys { + _, ok := f[k] + if !ok { + continue + } + return true + } + return false +} + +func (f Flags) String(keys ...string) string { + for _, k := range keys { + v, ok := 
f[k]
+		if !ok || len(v) == 0 {
+			continue
+		}
+		return v[0]
+	}
+	return ""
+}
+
+// Strings returns every value collected for the first of keys that is
+// present with at least one value, or nil when none matches.
+func (f Flags) Strings(keys ...string) []string {
+	for _, k := range keys {
+		v, ok := f[k]
+		if !ok || len(v) == 0 {
+			continue
+		}
+		return v
+	}
+	return nil
+}
+
+// flagSet holds the intermediate state of a Parse call: the arguments
+// not yet consumed and the flags collected so far.
+type flagSet struct {
+	args   []string
+	parsed bool
+	flags  Flags
+}
+
+// Parse splits args into flags (-x, --x, -x=v, --x=v) and the remaining
+// positional arguments. Parsing stops at the first non-flag argument or
+// at a bare "--".
+func Parse(args []string) (Flags, []string, error) {
+	fs := flagSet{}
+	if err := fs.parse(args); err != nil {
+		return nil, nil, err
+	}
+	if len(fs.args) == 0 {
+		fs.args = nil
+	}
+	return fs.flags, fs.args, nil
+}
+
+// parse consumes flags from the front of arguments until a non-flag
+// argument, a "--" terminator, or a syntax error is encountered.
+func (f *flagSet) parse(arguments []string) error {
+	f.parsed = true
+	f.args = arguments
+	for {
+		seen, err := f.parseOne()
+		if err != nil {
+			// Propagate malformed flags (e.g. "---x") instead of
+			// retrying forever: parseOne does not consume the bad
+			// argument, so another iteration can never make progress.
+			return err
+		}
+		if !seen {
+			break
+		}
+	}
+	return nil
+}
+
+// parseOne consumes a single flag from f.args. It reports whether a
+// flag was seen; (false, nil) means the next argument is positional,
+// the arguments are exhausted, or "--" ended flag parsing.
+func (f *flagSet) parseOne() (bool, error) {
+	if len(f.args) == 0 {
+		return false, nil
+	}
+	s := f.args[0]
+	if len(s) < 2 || s[0] != '-' {
+		return false, nil
+	}
+	numMinuses := 1
+	if s[1] == '-' {
+		numMinuses++
+		if len(s) == 2 { // "--" terminates the flags
+			f.args = f.args[1:]
+			return false, nil
+		}
+	}
+	name := s[numMinuses:]
+	if len(name) == 0 || name[0] == '-' || name[0] == '=' {
+		return false, fmt.Errorf("bad flag syntax: %s", s)
+	}
+
+	// it's a flag. does it have an argument?
+ f.args = f.args[1:] + hasValue := false + value := "" + for i := 1; i < len(name); i++ { // equals cannot be first + if name[i] == '=' { + value = name[i+1:] + hasValue = true + name = name[0:i] + break + } + } + + var actualValue string + if hasValue { + actualValue = value + } + + if existingValue, alreadythere := f.flags[name]; alreadythere { + f.flags[name] = append(existingValue, actualValue) + } else { + if f.flags == nil { + f.flags = make(map[string][]string) + } + f.flags[name] = []string{actualValue} + } + + return true, nil +} diff --git a/server/tools/cmd/gen/flag_test.go b/server/tools/cmd/gen/flag_test.go new file mode 100644 index 000000000..1d99fba63 --- /dev/null +++ b/server/tools/cmd/gen/flag_test.go @@ -0,0 +1,130 @@ +package main + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestParse(t *testing.T) { + flags, args, err := Parse(nil) + assert.NoError(t, err) + assert.Equal(t, Flags(nil), flags) + assert.Equal(t, []string(nil), args) + + flags, args, err = Parse([]string{}) + assert.NoError(t, err) + assert.Equal(t, Flags(nil), flags) + assert.Equal(t, []string(nil), args) + + flags, args, err = Parse([]string{"-a=b"}) + assert.NoError(t, err) + assert.Equal(t, Flags(map[string][]string{ + "a": {"b"}, + }), flags) + assert.Equal(t, []string(nil), args) + + flags, args, err = Parse([]string{"-a"}) + assert.NoError(t, err) + assert.Equal(t, Flags(map[string][]string{ + "a": {""}, + }), flags) + assert.Equal(t, []string(nil), args) + + flags, args, err = Parse([]string{"-a", "-b"}) + assert.NoError(t, err) + assert.Equal(t, Flags(map[string][]string{ + "a": {""}, + "b": {""}, + }), flags) + assert.Equal(t, []string(nil), args) + + flags, args, err = Parse([]string{"--hoge=a", "--hoge=b"}) + assert.NoError(t, err) + assert.Equal(t, Flags(map[string][]string{ + "hoge": {"a", "b"}, + }), flags) + assert.Equal(t, []string(nil), args) + + flags, args, err = Parse([]string{"aaa", "bbb"}) + assert.NoError(t, err) + 
assert.Equal(t, Flags(nil), flags) + assert.Equal(t, []string{"aaa", "bbb"}, args) + + flags, args, err = Parse([]string{"aaa", "-a", "--", "-b", "bbb"}) + assert.NoError(t, err) + assert.Equal(t, Flags(nil), flags) + assert.Equal(t, []string{"aaa", "-a", "--", "-b", "bbb"}, args) + + flags, args, err = Parse([]string{"-a", "--", "-b", "bbb"}) + assert.NoError(t, err) + assert.Equal(t, Flags(map[string][]string{ + "a": {""}, + }), flags) + assert.Equal(t, []string{"-b", "bbb"}, args) +} + +func TestFlags_Bool(t *testing.T) { + assert.Equal(t, false, Flags(nil).Bool("hoge")) + assert.Equal(t, true, Flags(map[string][]string{ + "hoge": nil, + }).Bool("hoge")) + assert.Equal(t, true, Flags(map[string][]string{ + "hoge": {""}, + }).Bool("hoge")) + assert.Equal(t, true, Flags(map[string][]string{ + "hoge": {"a"}, + "h": {"b"}, + }).Bool("hoge")) + + assert.Equal(t, false, Flags(nil).Bool("hoge", "h")) + assert.Equal(t, true, Flags(map[string][]string{ + "h": nil, + }).Bool("hoge", "h")) + assert.Equal(t, true, Flags(map[string][]string{ + "h": {""}, + }).Bool("hoge", "h")) + assert.Equal(t, true, Flags(map[string][]string{ + "h": {"a"}, + }).Bool("hoge", "h")) +} + +func TestFlags_String(t *testing.T) { + assert.Equal(t, "", Flags(nil).String("hoge")) + assert.Equal(t, "", Flags(map[string][]string{ + "hoge": nil, + }).String("hoge")) + assert.Equal(t, "a", Flags(map[string][]string{ + "hoge": {"a"}, + "h": {"b"}, + }).String("hoge")) + assert.Equal(t, "a", Flags(map[string][]string{ + "hoge": {"a", "b"}, + }).String("hoge")) + + assert.Equal(t, "", Flags(nil).String("hoge", "h")) + assert.Equal(t, "", Flags(map[string][]string{ + "h": nil, + }).String("hoge", "h")) + assert.Equal(t, "a", Flags(map[string][]string{ + "h": {"a"}, + }).String("hoge", "h")) + assert.Equal(t, "a", Flags(map[string][]string{ + "h": {"a", "b"}, + }).String("hoge", "h")) +} + +func TestFlags_Strings(t *testing.T) { + assert.Equal(t, []string(nil), Flags(nil).Strings("hoge")) + assert.Equal(t, 
[]string{""}, Flags(map[string][]string{ + "hoge": {""}, + }).Strings("hoge")) + assert.Equal(t, []string{"a", "b"}, Flags(map[string][]string{ + "hoge": {"a", "b"}, + "h": {"a"}, + }).Strings("hoge")) + assert.Equal(t, []string(nil), Flags(nil).Strings("hoge", "h")) + assert.Equal(t, []string{"a"}, Flags(map[string][]string{ + "h": {"a"}, + }).Strings("hoge", "h")) +} diff --git a/server/tools/cmd/gen/main.go b/server/tools/cmd/gen/main.go new file mode 100644 index 000000000..def8f0e70 --- /dev/null +++ b/server/tools/cmd/gen/main.go @@ -0,0 +1,98 @@ +package main + +import ( + "bytes" + "html/template" + "log" + "os" + "path/filepath" + + "github.com/iancoleman/strcase" + "github.com/pkg/errors" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/imports" +) + +var funcs = template.FuncMap{ + "snake": func(str string) string { + return strcase.ToSnake(str) + }, + "kebab": func(str string) string { + return strcase.ToKebab(str) + }, + "camel": func(str string) string { + return strcase.ToCamel(str) + }, + "lowercamel": func(str string) string { + return strcase.ToLowerCamel(str) + }, +} + +type Data struct { + PackageName string + Output string + Flags Flags + Args []string +} + +func main() { + log.SetPrefix("gen: ") + log.SetFlags(0) + + if err := run(); err != nil { + log.Fatal(err) + } +} + +func run() error { + flags, args, err := Parse(os.Args[1:]) + if err != nil { + return err + } + + output := flags.String("", "output") + if output == "" { + return errors.New("output option is required") + } + + templatePaths := flags.Strings("", "template") + if len(templatePaths) == 0 { + return errors.New("template option is required") + } + + pkgs, err := packages.Load(&packages.Config{Dir: filepath.Dir(output)}, ".") + if err != nil { + return errors.Wrap(err, "failed to load package") + } + + templ, err := template.New(filepath.Base(templatePaths[0])).Funcs(funcs).ParseFiles(templatePaths...) 
+ if err != nil { + return errors.Wrap(err, "unable to load templates") + } + + data := Data{ + PackageName: pkgs[0].Name, + Output: output, + Flags: flags, + Args: args, + } + + buf := &bytes.Buffer{} + buf.WriteString("// Code generated by gen, DO NOT EDIT.\n\n") + + if err := templ.Execute(buf, data); err != nil { + return errors.Wrap(err, "unable to generate code") + } + + src, err := imports.Process("", buf.Bytes(), nil) + if err != nil { + return errors.Wrap(err, "unable to gofmt") + } + + err = os.WriteFile(output, src, 0644) + if err != nil { + return errors.Wrap(err, "unable to write file") + } + + return nil +} diff --git a/server/tools/cmd/migrategen/main.go b/server/tools/cmd/migrategen/main.go new file mode 100644 index 000000000..8f891e423 --- /dev/null +++ b/server/tools/cmd/migrategen/main.go @@ -0,0 +1,124 @@ +package main + +import ( + "bytes" + "fmt" + "log" + "os" + "path/filepath" + "strings" + "text/template" + "time" + + "github.com/iancoleman/strcase" +) + +var dest = []string{"internal", "infrastructure", "mongo", "migration"} + +func main() { + log.SetPrefix("migrategen: ") + log.SetFlags(0) + + if err := run(); err != nil { + log.Fatal(err) + } +} + +func run() error { + name := strings.Join(os.Args[1:], " ") + snake := strcase.ToSnake(name) + camel := strcase.ToCamel(name) + key := time.Now().Format("060102150405") + + data := migration{ + Key: key, + Name: camel, + } + + files, err := os.ReadDir(filepath.Join(dest...)) + if err != nil { + return fmt.Errorf("unable to get dir: %w", err) + } + + migrations := make([]migration, 0, len(files)+1) + for _, file := range files { + if file.IsDir() { + continue + } + m := migrationFromFileName(file.Name()) + if m == nil { + continue + } + migrations = append(migrations, *m) + } + migrations = append(migrations, data) + + buf := bytes.NewBuffer(nil) + if err := templ.Execute(buf, data); err != nil { + return fmt.Errorf("unable to generate code: %w", err) + } + + if err := 
os.WriteFile(filepath.Join(append(dest, key+"_"+snake+".go")...), buf.Bytes(), 0644); err != nil { + return fmt.Errorf("unable to write file: %w", err) + } + + buf = bytes.NewBuffer(nil) + if err := templ2.Execute(buf, migrations); err != nil { + return fmt.Errorf("unable to generate code: %w", err) + } + + if err := os.WriteFile(filepath.Join(append(dest, "migrations.go")...), buf.Bytes(), 0644); err != nil { + return fmt.Errorf("unable to write file: %w", err) + } + + return nil +} + +type migration struct { + Key string + Name string +} + +func migrationFromFileName(n string) (m *migration) { + if filepath.Ext(n) != ".go" { + return + } + s := strings.SplitN(n[:len(n)-3], "_", 2) + if len(s) != 2 { + return + } + m = &migration{ + Key: s[0], + Name: strcase.ToCamel(s[1]), + } + return +} + +var templ = template.Must(template.New("generated").Parse(`package migration + +import "context" + +func {{.Name}}(ctx context.Context, c DBClient) error { + // TODO: Write your migration code here + + // WARNING: + // If the migration takes too long, the deployment may fail in a serverless environment. + // Set the batch size to as large a value as possible without using up the RAM of the deployment destination. + + return nil +} +`)) + +var templ2 = template.Must(template.New("generated2").Parse(`// Code generated by migrategen, DO NOT EDIT. + +package migration + +// To add a new migration, run go run ./tools/cmd/migrategen migration_name + +// WARNING: +// If the migration takes too long, the deployment may fail in a serverless environment. +// Set the batch size to as large a value as possible without using up the RAM of the deployment destination. 
+var migrations = map[int64]MigrationFunc{ +{{range .}} {{.Key}}: {{.Name}}, +{{end}}} +`)) diff --git a/server/tools/cmd/shapefiletest/main.go b/server/tools/cmd/shapefiletest/main.go new file mode 100644 index 000000000..da7b0af6c --- /dev/null +++ b/server/tools/cmd/shapefiletest/main.go @@ -0,0 +1,46 @@ +package main + +import ( + "log" + "strconv" + + "github.com/jonas-p/go-shp" +) + +func main() { + // points to write + points := []shp.Point{ + {X: 10.0, Y: 10.0}, + {X: 10.0, Y: 15.0}, + {X: 15.0, Y: 15.0}, + {X: 15.0, Y: 10.0}, + } + + // fields to write + fields := []shp.Field{ + // String attribute field with length 25 + shp.StringField("NAME", 25), + } + + // create and open a shapefile for writing points + shape, err := shp.Create("points.shp", shp.POINT) + if err != nil { + log.Fatal(err) + } + defer shape.Close() + + // setup fields for attributes + if err := shape.SetFields(fields); err != nil { + log.Fatal(err) + } + + // write points and attributes + for n, point := range points { + shape.Write(&point) + + // write attribute for object n for field 0 (NAME) + if err := shape.WriteAttribute(n, 0, "Point "+strconv.Itoa(n+1)); err != nil { + log.Fatal(err) + } + } +}